diff --git a/docs/errorcode/linkis-engineconn-plugin-core-errorcode.md b/docs/errorcode/linkis-engineconn-plugin-core-errorcode.md index a929d062d2..0fc2521a76 100644 --- a/docs/errorcode/linkis-engineconn-plugin-core-errorcode.md +++ b/docs/errorcode/linkis-engineconn-plugin-core-errorcode.md @@ -3,7 +3,7 @@ | module name(模块名) | error code(错误码) | describe(描述) |enumeration name(枚举)| Exception Class(类名)| | -------- | -------- | ----- |-----|-----| |linkis-engineconn-plugin-core |10001|Failed to createEngineConnLaunchRequest(创建 EngineConnLaunchRequest失败)|FAILED_CREATE_ELR|EngineconnCoreErrorCodeSummary| -|linkis-engineconn-plugin-core |10001|The engine plug-in material is abnormal, please check whether the material is uploaded successfully(引擎插件物料异常,请检查物料是否上传成功)|EN_PLUGIN_MATERIAL_SOURCE_EXCEPTION|EngineconnCoreErrorCodeSummary| +|linkis-engineconn-plugin-core |10001|The engine plugin material is abnormal, please check whether the material is uploaded successfully(引擎插件物料异常,请检查物料是否上传成功)|EN_PLUGIN_MATERIAL_SOURCE_EXCEPTION|EngineconnCoreErrorCodeSummary| |linkis-engineconn-plugin-core |10001|EngineTypeLabel are requested(需要参数 EngineTypeLabel)|ETL_REQUESTED|EngineconnCoreErrorCodeSummary| |linkis-engineconn-plugin-core |20000|Cannot instance EngineConnExecution(无法实例化 EngineConnExecution)|CANNOT_INSTANCE_ECE|EngineconnCoreErrorCodeSummary| |linkis-engineconn-plugin-core |20000|Cannot find default ExecutorFactory(找不到默认的 ExecutorFactory)|CANNOT_DEFAULT_EF|EngineconnCoreErrorCodeSummary| diff --git a/docs/errorcode/python-errorcode.md b/docs/errorcode/python-errorcode.md index 8a4f843484..cf63aaa3fe 100644 --- a/docs/errorcode/python-errorcode.md +++ b/docs/errorcode/python-errorcode.md @@ -1,7 +1,7 @@ ## python errorcode -| 模块名(服务名) | 错误码 | 描述 |enumeration name| Exception Class| -| -------- | -------- | ----- |-----|-----| -|python|41001| |PYTHON_EXECUTE_ERROR|LinkisPythonErrorCodeSummary| -|python|60003|Pyspark process has stopped, query failed!(Pyspark 
进程已停止,查询失败!)|PYSPARK_PROCESSS_STOPPED|LinkisPythonErrorCodeSummary| -|python|400201|Invalid python session.(无效的 python 会话.)|INVALID_PYTHON_SESSION|LinkisPythonErrorCodeSummary| +| 模块名(服务名) | 错误码 | 描述 |enumeration name| Exception Class| +| -------- |--------| ----- |-----|-----| +|python| 60002 | |PYTHON_EXECUTE_ERROR|LinkisPythonErrorCodeSummary| +|python| 60003 |Pyspark process has stopped, query failed!(Pyspark 进程已停止,查询失败!)|PYSPARK_PROCESSS_STOPPED|LinkisPythonErrorCodeSummary| +|python| 400201 |Invalid python session.(无效的 python 会话.)|INVALID_PYTHON_SESSION|LinkisPythonErrorCodeSummary| diff --git a/docs/info-1.3.2.md b/docs/info-1.3.2.md index 55e5aeecbc..0c690517fb 100644 --- a/docs/info-1.3.2.md +++ b/docs/info-1.3.2.md @@ -4,4 +4,5 @@ |------------------| ----- |----------------------------------------------------------------------|------| ------------------------------------------------------- | | linkis-jobhistory | 新增 | wds.linkis.jobhistory.admin | hadoop |可以查看所有历史任务的用户 注意:wds.linkis.governance.station.admin 为管理用户(也具有可以查看所有历史任务的权限)| | linkis | 新增 | wds.linkis.governance.station.admin.token | /具有管理员权限的特殊token| +| linkis | 新增 | linkis.configuration.remove.application.cache | IDE |清除该应用的配置缓存| | cg-entrance | 新增 | linkis.entrance.auto.clean.dirty.data.enable | true |entrance重启调用ps-jobhistory接口是否开启,ture为开启,取值范围:true或false| diff --git a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/ByteTimeUtils.java b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/ByteTimeUtils.java index d23f4a0867..0ecb3dc2a5 100644 --- a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/ByteTimeUtils.java +++ b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/ByteTimeUtils.java @@ -213,7 +213,6 @@ private static long parseByteString(String str, ByteUnit unit) { } else { throw new NumberFormatException("Failed to parse byte string: " + str); } - suffix = suffix.toLowerCase(); // Check for 
invalid suffixes if (suffix != null && !byteSuffixes.containsKey(suffix)) { throw new NumberFormatException("Invalid suffix: \"" + suffix + "\""); @@ -297,6 +296,18 @@ public static long byteStringAsGb(String str) { return parseByteString(str, ByteUnit.GiB); } + /** + * Convert a passed byte string (e.g. -50b, -100k, or -250m) to gibibytes for internal use. + * + *

If no suffix is provided, the passed number is assumed to be in gibibytes. + */ + public static long negativeByteStringAsGb(String str) { + if (str.startsWith("-")) { + return Math.negateExact(parseByteString(str.substring(1), ByteUnit.GiB)); + } + return parseByteString(str, ByteUnit.GiB); + } + /** * Returns a byte array with the buffer's contents, trying to avoid copying the data if possible. */ diff --git a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/ResultSetUtils.java b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/ResultSetUtils.java new file mode 100644 index 0000000000..a367b38b80 --- /dev/null +++ b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/ResultSetUtils.java @@ -0,0 +1,57 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.common.utils; + +import org.apache.linkis.common.io.FsPath; + +import java.io.File; +import java.util.Collections; +import java.util.Comparator; +import java.util.List; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +public class ResultSetUtils { + + // Sort in ASC order by numx in the result set _numx.dolphin file name + public static Comparator getResultSetFileComparatorOrderByNameNum() { + + Comparator comparator = + (o1, o2) -> { + // get the num of file name + String regx = "\\d+"; + + String[] res1 = o1.getPath().split(File.separator); + String fileName1 = res1[res1.length - 1]; + Matcher matcher1 = Pattern.compile(regx).matcher(fileName1); + int num1 = matcher1.find() ? Integer.parseInt(matcher1.group()) : Integer.MAX_VALUE; + + String[] res2 = o2.getPath().split(File.separator); + String fileName2 = res2[res2.length - 1]; + Matcher matcher2 = Pattern.compile(regx).matcher(fileName2); + int num2 = matcher2.find() ? Integer.parseInt(matcher2.group()) : Integer.MAX_VALUE; + + return num1 - num2; + }; + return comparator; + } + + public static void sortByNameNum(List fsPathList) { + Collections.sort(fsPathList, getResultSetFileComparatorOrderByNameNum()); + } +} diff --git a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/VariableOperationUtils.java b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/VariableOperationUtils.java index d71f8b40e6..615472474d 100644 --- a/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/VariableOperationUtils.java +++ b/linkis-commons/linkis-common/src/main/java/org/apache/linkis/common/utils/VariableOperationUtils.java @@ -28,7 +28,10 @@ import java.util.Iterator; import java.util.Map; +import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.json.JsonMapper; 
import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; @@ -49,6 +52,9 @@ public class VariableOperationUtils { private static final String[] CYCLES = new String[] {CYCLE_YEAR, CYCLE_MONTH, CYCLE_DAY, CYCLE_HOUR, CYCLE_MINUTE, CYCLE_SECOND}; + private static final ObjectMapper mapper = + JsonMapper.builder().enable(DeserializationFeature.FAIL_ON_TRAILING_TOKENS).build(); + /** * yyyy-MM-dd HH:mm:ss * @@ -78,30 +84,44 @@ public static ZonedDateTime toZonedDateTime(Date date) { * @param str * @return */ + @Deprecated public static String replaces(ZonedDateTime dateTime, String str) throws VariableOperationFailedException { - return replaces(dateTime, str, true); + try { + JsonNode rootNode = mapper.readTree(str); + if (rootNode.isArray() || rootNode.isObject()) { + replaceJson(dateTime, rootNode); + return rootNode.toString(); + } + } catch (Exception e) { + return replace(dateTime, str); + } + return replace(dateTime, str); } /** * json support variable operation * + * @param codeType * @param dateTime * @param str - * @param format * @return */ - public static String replaces(ZonedDateTime dateTime, String str, boolean format) + public static String replaces(String codeType, ZonedDateTime dateTime, String str) throws VariableOperationFailedException { - try { - JsonNode rootNode = JsonUtils.jackson().readTree(str); - if (rootNode.isArray() || rootNode.isObject()) { - replaceJson(dateTime, rootNode); - return rootNode.toString(); + String languageType = CodeAndRunTypeUtils.getLanguageTypeByCodeType(codeType, ""); + if (languageType.equals(CodeAndRunTypeUtils.LANGUAGE_TYPE_JSON())) { + try { + JsonNode rootNode = mapper.readTree(str); + if (rootNode.isArray() || rootNode.isObject()) { + replaceJson(dateTime, rootNode); + return rootNode.toString(); + } + } catch (Exception e) { + return replace(dateTime, str); } - } catch (Exception e) { - return replace(dateTime, str); } + return replace(dateTime, str); } @@ 
-197,7 +217,7 @@ private static void replaceJson(ZonedDateTime dateTime, JsonNode object) } else if (temp.isObject()) { replaceJson(dateTime, temp); } else { - arrayNode.insert(i, replace(dateTime, temp.toString())); + arrayNode.set(i, replace(dateTime, temp.toString())); } } } else if (object.isObject()) { diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/BDPConfiguration.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/BDPConfiguration.scala index 55535e5336..14febab63a 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/BDPConfiguration.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/BDPConfiguration.scala @@ -22,7 +22,7 @@ import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.commons.io.IOUtils import org.apache.commons.lang3.StringUtils -import java.io.{File, FileInputStream, InputStream, IOException} +import java.io._ import java.util.Properties import java.util.concurrent.TimeUnit import java.util.concurrent.locks.ReentrantReadWriteLock @@ -140,15 +140,20 @@ private[conf] object BDPConfiguration extends Logging { private def initConfig(config: Properties, filePath: String) { var inputStream: InputStream = null - + var reader: InputStreamReader = null + var buff: BufferedReader = null Utils.tryFinally { Utils.tryCatch { inputStream = new FileInputStream(filePath) - config.load(inputStream) + reader = new InputStreamReader(inputStream, "UTF-8") + buff = new BufferedReader(reader) + config.load(buff) } { case e: IOException => logger.error("Can't load " + filePath, e) } } { + IOUtils.closeQuietly(buff) + IOUtils.closeQuietly(reader) IOUtils.closeQuietly(inputStream) } } diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/VariableUtils.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/VariableUtils.scala index 30bdeb4b14..6c5bd7cf3c 
100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/VariableUtils.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/utils/VariableUtils.scala @@ -143,9 +143,10 @@ object VariableUtils extends Logging { } initAllDateVars(run_date, nameAndType) val codeOperation = parserVar(code, nameAndType) - parserDate(codeOperation, run_date) + parserDate(codeType, codeOperation, run_date) } + @deprecated private def parserDate(code: String, run_date: CustomDateType): String = { if (Configuration.VARIABLE_OPERATION) { val zonedDateTime: ZonedDateTime = VariableOperationUtils.toZonedDateTime(run_date.getDate) @@ -155,6 +156,15 @@ object VariableUtils extends Logging { } } + private def parserDate(codeType: String, code: String, run_date: CustomDateType): String = { + if (Configuration.VARIABLE_OPERATION) { + val zonedDateTime: ZonedDateTime = VariableOperationUtils.toZonedDateTime(run_date.getDate) + VariableOperationUtils.replaces(codeType, zonedDateTime, code) + } else { + code + } + } + private def initAllDateVars( run_date: CustomDateType, nameAndType: mutable.Map[String, variable.VariableType] @@ -337,7 +347,7 @@ object VariableUtils extends Logging { * * @param code * :code - * @param codeType + * @param languageType * :SQL,PYTHON * @return */ @@ -346,27 +356,37 @@ object VariableUtils extends Logging { var varString: String = null var errString: String = null + var rightVarString: String = null languageType match { case CodeAndRunTypeUtils.LANGUAGE_TYPE_SQL => varString = """\s*--@set\s*.+\s*""" + rightVarString = """^\s*--@set\s*.+\s*""" errString = """\s*--@.*""" case CodeAndRunTypeUtils.LANGUAGE_TYPE_PYTHON | CodeAndRunTypeUtils.LANGUAGE_TYPE_SHELL => varString = """\s*#@set\s*.+\s*""" + rightVarString = """^\s*#@set\s*.+\s*""" errString = """\s*#@""" case CodeAndRunTypeUtils.LANGUAGE_TYPE_SCALA => varString = """\s*//@set\s*.+\s*""" + rightVarString = """^\s*//@set\s*.+\s*""" errString = """\s*//@.+""" 
case CodeAndRunTypeUtils.LANGUAGE_TYPE_JAVA => varString = """\s*!!@set\s*.+\s*""" + rightVarString = """^\s*!!@set\s*.+\s*""" case _ => return nameAndValue } val customRegex = varString.r.unanchored + val customRightRegex = rightVarString.r.unanchored val errRegex = errString.r.unanchored code.split("\n").foreach { str => { + + if (customRightRegex.unapplySeq(str).size < customRegex.unapplySeq(str).size) { + logger.warn(s"code:$str is wrong custom variable format!!!") + } str match { case customRegex() => val clearStr = if (str.endsWith(";")) str.substring(0, str.length - 1) else str diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/present/PresentModeImplTest.java b/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/conf/BDPConfigurationTest.java similarity index 60% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/present/PresentModeImplTest.java rename to linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/conf/BDPConfigurationTest.java index 8f776bef81..5a025eb8b0 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/present/PresentModeImplTest.java +++ b/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/conf/BDPConfigurationTest.java @@ -15,22 +15,27 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.present; +package org.apache.linkis.common.conf; import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; -public class PresentModeImplTest { +/** BDPConfiguration Tester */ +public class BDPConfigurationTest { @Test - @DisplayName("enumTest") - public void enumTest() { + public void testGetOption() { - String stdoutName = PresentModeImpl.STDOUT.getName(); - String textFileName = PresentModeImpl.TEXT_FILE.getName(); + Assertions.assertEquals( + "properties支持中文", + BDPConfiguration.getOption( + CommonVars.apply("linkis.jobhistory.error.msg.tip", "properties支持中文")) + .get()); - Assertions.assertEquals("STDOUT", stdoutName); - Assertions.assertEquals("TEXT_FILE", textFileName); + Assertions.assertEquals( + "properties支持中文(默认)", + BDPConfiguration.getOption( + CommonVars.apply("linkis.jobhistory.error.msg.tip1", "properties支持中文(默认)")) + .get()); } } diff --git a/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/utils/ByteTimeUtilsTest.java b/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/utils/ByteTimeUtilsTest.java new file mode 100644 index 0000000000..f548d89d46 --- /dev/null +++ b/linkis-commons/linkis-common/src/test/java/org/apache/linkis/common/utils/ByteTimeUtilsTest.java @@ -0,0 +1,175 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.common.utils; + +import java.util.function.Function; + +import com.google.common.collect.ImmutableMap; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +class ByteTimeUtilsTest { + + private static final ImmutableMap> opFunction = + ImmutableMap.>builder() + .put("byteStringAsBytes", tar -> ByteTimeUtils.byteStringAsBytes(tar)) + .put("byteStringAsKb", tar -> ByteTimeUtils.byteStringAsKb(tar)) + .put("byteStringAsMb", tar -> ByteTimeUtils.byteStringAsMb(tar)) + .put("byteStringAsGb", tar -> ByteTimeUtils.byteStringAsGb(tar)) + .build(); + + private static final ImmutableMap convertToByte = + ImmutableMap.builder() + .put("1", 1l) + .put("1b", 1l) + .put("1B", 1l) + .put("1k", 1024l) + .put("1K", 1024l) + .put("1kb", 1024l) + .put("1Kb", 1024l) + .put("1kB", 1024l) + .put("1KB", 1024l) + .put("1m", 1024l * 1024l) + .put("1M", 1024l * 1024l) + .put("1mb", 1024l * 1024l) + .put("1Mb", 1024l * 1024l) + .put("1mB", 1024l * 1024l) + .put("1MB", 1024l * 1024l) + .put("1g", 1024l * 1024l * 1024l) + .put("1G", 1024l * 1024l * 1024l) + .put("1gb", 1024l * 1024l * 1024l) + .put("1gB", 1024l * 1024l * 1024l) + .put("1Gb", 1024l * 1024l * 1024l) + .put("1GB", 1024l * 1024l * 1024l) + .put("1t", 1024l * 1024l * 1024l * 1024l) + .put("1T", 1024l * 1024l * 1024l * 1024l) + .put("1tb", 1024l * 1024l * 1024l * 1024l) + .put("1Tb", 1024l * 1024l * 1024l * 1024l) + .put("1tB", 1024l * 1024l * 1024l * 1024l) + .put("1TB", 1024l * 1024l * 1024l * 1024l) + .put("1p", 1024l * 1024l * 1024l * 1024l * 1024l) + 
.put("1P", 1024l * 1024l * 1024l * 1024l * 1024l) + .put("1pb", 1024l * 1024l * 1024l * 1024l * 1024l) + .put("1Pb", 1024l * 1024l * 1024l * 1024l * 1024l) + .put("1pB", 1024l * 1024l * 1024l * 1024l * 1024l) + .put("1PB", 1024l * 1024l * 1024l * 1024l * 1024l) + .build(); + + private static final ImmutableMap convertToKB = + ImmutableMap.builder() + .put("1", 1l) + .put("1024b", 1l) + .put("1024B", 1l) + .put("1k", 1l) + .put("1K", 1l) + .put("1kb", 1l) + .put("1Kb", 1l) + .put("1kB", 1l) + .put("1KB", 1l) + .put("1m", 1024l) + .put("1M", 1024l) + .put("1mb", 1024l) + .put("1Mb", 1024l) + .put("1mB", 1024l) + .put("1MB", 1024l) + .put("1g", 1024l * 1024l) + .put("1G", 1024l * 1024l) + .put("1gb", 1024l * 1024l) + .put("1gB", 1024l * 1024l) + .put("1Gb", 1024l * 1024l) + .put("1GB", 1024l * 1024l) + .build(); + + private static final ImmutableMap convertToMB = + ImmutableMap.builder() + .put("1", 1l) + .put("1024k", 1l) + .put("1024K", 1l) + .put("1024kb", 1l) + .put("1024Kb", 1l) + .put("1024kB", 1l) + .put("1024KB", 1l) + .put("1m", 1l) + .put("1M", 1l) + .put("1mb", 1l) + .put("1Mb", 1l) + .put("1mB", 1l) + .put("1MB", 1l) + .put("1g", 1024l) + .put("1G", 1024l) + .put("1gb", 1024l) + .put("1gB", 1024l) + .put("1Gb", 1024l) + .put("1GB", 1024l) + .build(); + + private static final ImmutableMap convertToGB = + ImmutableMap.builder() + .put("1", 1l) + .put("1024m", 1l) + .put("1024M", 1l) + .put("1024mb", 1l) + .put("1024Mb", 1l) + .put("1024mB", 1l) + .put("1024MB", 1l) + .put("1g", 1l) + .put("1G", 1l) + .put("1gb", 1l) + .put("1gB", 1l) + .put("1Gb", 1l) + .put("1GB", 1l) + .put("1t", 1024l) + .put("1T", 1024l) + .put("1tb", 1024l) + .put("1Tb", 1024l) + .put("1tB", 1024l) + .put("1TB", 1024l) + .build(); + + @Test + void byteStringAsBytes() { + convertToByte.forEach( + (k, v) -> Assertions.assertEquals(opFunction.get("byteStringAsBytes").apply(k), v)); + Assertions.assertThrows( + IllegalArgumentException.class, () -> 
opFunction.get("byteStringAsBytes").apply("1A")); + } + + @Test + void byteStringAsKb() { + convertToKB.forEach( + (k, v) -> Assertions.assertEquals(opFunction.get("byteStringAsKb").apply(k), v)); + Assertions.assertThrows( + IllegalArgumentException.class, () -> opFunction.get("byteStringAsKb").apply("1a")); + } + + @Test + void byteStringAsMb() { + convertToMB.forEach( + (k, v) -> Assertions.assertEquals(opFunction.get("byteStringAsMb").apply(k), v)); + Assertions.assertThrows( + IllegalArgumentException.class, () -> opFunction.get("byteStringAsMb").apply("1c")); + } + + @Test + void byteStringAsGb() { + convertToGB.forEach( + (k, v) -> Assertions.assertEquals(opFunction.get("byteStringAsGb").apply(k), v)); + Assertions.assertThrows( + IllegalArgumentException.class, () -> opFunction.get("byteStringAsGb").apply("1C")); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/resources/conf/user.properties b/linkis-commons/linkis-common/src/test/resources/linkis.properties similarity index 86% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/resources/conf/user.properties rename to linkis-commons/linkis-common/src/test/resources/linkis.properties index faa650fc5c..230ad85a4e 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/resources/conf/user.properties +++ b/linkis-commons/linkis-common/src/test/resources/linkis.properties @@ -12,8 +12,5 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# -# -user.props=xxxxxxxxxxxxx -conf.prop.integer=9 -conf.prop.string=str -wds.linkis.client.param.conf.spark.executor.memory=11111G \ No newline at end of file + +linkis.jobhistory.error.msg.tip=properties支持中文 \ No newline at end of file diff --git a/linkis-commons/linkis-common/src/test/scala/org/apache/linkis/common/utils/VariableUtilsTest.scala b/linkis-commons/linkis-common/src/test/scala/org/apache/linkis/common/utils/VariableUtilsTest.scala index c0d4ad1d61..e7a105497c 100644 --- a/linkis-commons/linkis-common/src/test/scala/org/apache/linkis/common/utils/VariableUtilsTest.scala +++ b/linkis-commons/linkis-common/src/test/scala/org/apache/linkis/common/utils/VariableUtilsTest.scala @@ -22,6 +22,8 @@ import org.apache.linkis.common.variable.DateTypeUtils.{getCurHour, getToday} import java.util +import scala.collection.mutable + import org.junit.jupiter.api.Assertions.assertEquals import org.junit.jupiter.api.Test @@ -63,4 +65,19 @@ class VariableUtilsTest { assertEquals(VariableUtils.replace(sql, "sql", varMap), resSql) } + @Test + def testGetCustomVar: Unit = { + var scalaCode = "" + + "-------@set globalpara=60--------\n" + + "--@set globalpara2=66\n" + + "select ${globalpara} as globalpara,\n" + + "-- ${globalpara1} as globalpara1, \n" + + "${globalpara2} as globalpara2;\n" + var pythonCode = "" + + val nameAndValue: mutable.Map[String, String] = + VariableUtils.getCustomVar(scalaCode, CodeAndRunTypeUtils.LANGUAGE_TYPE_SQL); + assertEquals(nameAndValue.size, 2) + } + } diff --git a/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/config/ClientConfigBuilder.scala b/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/config/ClientConfigBuilder.scala index c3e5afba30..b1fc579f3c 100644 --- a/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/config/ClientConfigBuilder.scala +++ 
b/linkis-commons/linkis-httpclient/src/main/scala/org/apache/linkis/httpclient/config/ClientConfigBuilder.scala @@ -17,6 +17,7 @@ package org.apache.linkis.httpclient.config +import org.apache.linkis.common.exception.LinkisRetryException import org.apache.linkis.common.utils.{DefaultRetryHandler, RetryHandler} import org.apache.linkis.httpclient.authentication.AuthenticationStrategy import org.apache.linkis.httpclient.loadbalancer.LoadBalancerStrategy @@ -38,7 +39,12 @@ class ClientConfigBuilder protected () { protected var readTimeout: Long = _ protected var maxConnection: Int = _ protected var retryEnabled: Boolean = true - protected var retryHandler: RetryHandler = new DefaultRetryHandler + + protected var retryHandler: RetryHandler = { + val retryHandler = new DefaultRetryHandler + retryHandler.addRetryException(classOf[LinkisRetryException]) + retryHandler + } def addServerUrl(serverUrl: String): this.type = { this.serverUrl = serverUrl diff --git a/linkis-commons/linkis-module/pom.xml b/linkis-commons/linkis-module/pom.xml index b70331e0d1..cbaf0295fd 100644 --- a/linkis-commons/linkis-module/pom.xml +++ b/linkis-commons/linkis-module/pom.xml @@ -294,6 +294,7 @@ org.springframework.security spring-security-crypto + 5.7.5 diff --git a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/security/SecurityFilter.scala b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/security/SecurityFilter.scala index 411bda4820..5cc796d23e 100644 --- a/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/security/SecurityFilter.scala +++ b/linkis-commons/linkis-module/src/main/scala/org/apache/linkis/server/security/SecurityFilter.scala @@ -199,7 +199,11 @@ object SecurityFilter { def getLoginUsername(req: HttpServletRequest): String = { if (Configuration.IS_TEST_MODE.getValue) { - ServerConfiguration.BDP_TEST_USER.getValue; + val testUser = ServerConfiguration.BDP_TEST_USER.getValue + if (StringUtils.isBlank(testUser)) { + 
throw new IllegalUserTicketException("Need to set test user when enable test module") + } + testUser } else { getLoginUser(req).getOrElse( throw new IllegalUserTicketException(ILLEGAL_USER_TOKEN.getErrorDesc) diff --git a/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/constants/TaskConstant.java b/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/constants/TaskConstant.java index 42661f8283..6eb97c84d9 100644 --- a/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/constants/TaskConstant.java +++ b/linkis-commons/linkis-protocol/src/main/java/org/apache/linkis/protocol/constants/TaskConstant.java @@ -69,6 +69,7 @@ public interface TaskConstant { String TICKET_ID = "ticketId"; String ENGINE_CONN_TASK_ID = "engineConnTaskId"; String ENGINE_CONN_SUBMIT_TIME = "engineConnSubmitTime"; + String DEBUG_ENBALE = "debug.enable"; String PARAMS_DATA_SOURCE = "dataSources"; diff --git a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/callback/LogCallbackProtocol.scala b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/callback/LogCallbackProtocol.scala index 91b0e01727..0109472a90 100644 --- a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/callback/LogCallbackProtocol.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/callback/LogCallbackProtocol.scala @@ -19,9 +19,6 @@ package org.apache.linkis.protocol.callback import org.apache.linkis.protocol.message.RequestProtocol -// TODO: log type -case class LogCallbackProtocol(nodeId: String, logs: Array[String]) extends RequestProtocol - case class YarnAPPIdCallbackProtocol(nodeId: String, applicationId: String) extends RequestProtocol case class YarnInfoCallbackProtocol(nodeId: String, uri: String) extends RequestProtocol diff --git a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/TaskUtils.scala 
b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/TaskUtils.scala index 3b94bbdc14..9b2be16ef7 100644 --- a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/TaskUtils.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/TaskUtils.scala @@ -97,4 +97,15 @@ object TaskUtils { def addLabelsMap(params: util.Map[String, AnyRef], labels: util.Map[String, AnyRef]): Unit = addMap(params, labels, TaskConstant.LABELS) + def isWithDebugInfo(params: util.Map[String, AnyRef]): Boolean = { + val debug = getConfigurationMap(params, TaskConstant.PARAMS_CONFIGURATION_STARTUP).get( + TaskConstant.DEBUG_ENBALE + ) + if (debug != null && "true".equals(debug.toString)) { + true + } else { + false + } + } + } diff --git a/linkis-commons/linkis-rpc/pom.xml b/linkis-commons/linkis-rpc/pom.xml index 3a51a0bfcd..e916107f50 100644 --- a/linkis-commons/linkis-rpc/pom.xml +++ b/linkis-commons/linkis-rpc/pom.xml @@ -56,15 +56,7 @@ com.fasterxml.jackson.core - jackson-annotations - - - com.fasterxml.jackson.core - jackson-core - - - com.fasterxml.jackson.core - jackson-databind + * com.google.code.findbugs @@ -78,8 +70,17 @@ org.springframework.cloud spring-cloud-commons + + commons-fileupload + commons-fileupload + + + commons-fileupload + commons-fileupload + ${commons-fileupload.version} + org.springframework.cloud spring-cloud-commons diff --git a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/conf/RPCConfiguration.scala b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/conf/RPCConfiguration.scala index 32f12da273..22e6ea9f6c 100644 --- a/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/conf/RPCConfiguration.scala +++ b/linkis-commons/linkis-rpc/src/main/scala/org/apache/linkis/rpc/conf/RPCConfiguration.scala @@ -80,6 +80,13 @@ object RPCConfiguration { CommonVars("wds.linkis.gateway.conf.linkismanager.list", "linkisManager,engineplugin").getValue 
.split(",") + val LINKIS_DATASOURCE_SERVICE_NAME: CommonVars[String] = + CommonVars("linkis.gateway.conf.linkisdatasource.name", "linkis-ps-datasource") + + val LINKIS_DATASOURCE_SERVICE_LIST: Array[String] = + CommonVars("linkis.gateway.conf.linkisdatasource.list", "datasource").getValue + .split(",") + val BDP_RPC_INSTANCE_ALIAS_SERVICE_REFRESH_INTERVAL: CommonVars[TimeType] = CommonVars("wds.linkis.rpc.instancealias.refresh.interval", new TimeType("3s")) diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/Consumer.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/Consumer.scala index 50dce2ca12..165a274362 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/Consumer.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/Consumer.scala @@ -41,7 +41,6 @@ abstract class Consumer(schedulerContext: SchedulerContext, executeService: Exec def start(): Unit def shutdown(): Unit = { - logger.info(s"$toString is ready to stop!") terminate = true logger.info(s"$toString stopped!") } diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/GroupFactory.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/GroupFactory.scala index f3471b07dd..be1716f238 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/GroupFactory.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/GroupFactory.scala @@ -19,6 +19,11 @@ package org.apache.linkis.scheduler.queue abstract class GroupFactory { + /** + * Create a Group and set the concurrency limit of the group + * @param event + * @return + */ def getOrCreateGroup(event: SchedulerEvent): Group def getGroup(groupName: String): Group diff --git 
a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/LoopArrayQueue.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/LoopArrayQueue.scala index b0bbfd3c2b..8bea7e52b1 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/LoopArrayQueue.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/LoopArrayQueue.scala @@ -40,7 +40,12 @@ class LoopArrayQueue(var group: Group) extends ConsumeQueue with Logging { override def getWaitingEvents: Array[SchedulerEvent] = { eventQueue synchronized { - toIndexedSeq.filter(x => x.getState.equals(SchedulerEventState.Inited)).toArray + toIndexedSeq + .filter(x => + x.getState.equals(SchedulerEventState.Inited) || x.getState + .equals(SchedulerEventState.Scheduled) + ) + .toArray } } diff --git a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/fifoqueue/FIFOUserConsumer.scala b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/fifoqueue/FIFOUserConsumer.scala index 2a40c2517b..d541d8a2eb 100644 --- a/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/fifoqueue/FIFOUserConsumer.scala +++ b/linkis-commons/linkis-scheduler/src/main/scala/org/apache/linkis/scheduler/queue/fifoqueue/FIFOUserConsumer.scala @@ -73,6 +73,8 @@ class FIFOUserConsumer( override def getRunningEvents: Array[SchedulerEvent] = getEvents(e => e.isRunning || e.isWaitForRetry) + protected def getSchedulerContext: SchedulerContext = schedulerContext + private def getEvents(op: SchedulerEvent => Boolean): Array[SchedulerEvent] = { val result = ArrayBuffer[SchedulerEvent]() runningJobs.filter(_ != null).filter(x => op(x)).foreach(result += _) @@ -82,16 +84,28 @@ class FIFOUserConsumer( override def run(): Unit = { Thread.currentThread().setName(s"${toString}Thread") logger.info(s"$toString thread started!") - while (!terminate) { - 
Utils.tryAndError(loop()) - Utils.tryAndError(Thread.sleep(10)) + while (!terminate) Utils.tryAndError { + loop() + Thread.sleep(10) } logger.info(s"$toString thread stopped!") } protected def askExecutorGap(): Unit = {} + /** + * Task scheduling interception is used to judge the rules of task operation, and to judge other + * task rules based on Group. For example, Entrance makes Creator-level task judgment. + */ + protected def runScheduleIntercept(): Boolean = { + true + } + protected def loop(): Unit = { + if (!runScheduleIntercept()) { + Utils.tryQuietly(Thread.sleep(1000)) + return + } var isRetryJob = false def getWaitForRetryEvent: Option[SchedulerEvent] = { val waitForRetryJobs = runningJobs.filter(job => job != null && job.isJobCanRetry) @@ -110,7 +124,7 @@ class FIFOUserConsumer( if (event.isEmpty) { val completedNums = runningJobs.filter(job => job == null || job.isCompleted) if (completedNums.length < 1) { - Utils.tryQuietly(Thread.sleep(1000)) // TODO 还可以优化,通过实现JobListener进行优化 + Utils.tryQuietly(Thread.sleep(1000)) return } while (event.isEmpty) { @@ -119,7 +133,12 @@ class FIFOUserConsumer( if ( takeEvent.exists(e => Utils.tryCatch(e.turnToScheduled()) { t => - takeEvent.get.asInstanceOf[Job].onFailure("Job状态翻转为Scheduled失败!", t) + takeEvent.get + .asInstanceOf[Job] + .onFailure( + "Failed to change the job status to Scheduled(Job状态翻转为Scheduled失败)", + t + ) false } ) @@ -174,7 +193,7 @@ class FIFOUserConsumer( ) ) case error: Throwable => - job.onFailure("请求引擎失败,可能是由于后台进程错误!请联系管理员", error) + job.onFailure("Failed to request EngineConn", error) if (job.isWaitForRetry) { logger.warn(s"Ask executor for Job $job failed, wait for the next retry!", error) if (!isRetryJob) putToRunningJobs(job) @@ -190,6 +209,20 @@ class FIFOUserConsumer( override def shutdown(): Unit = { future.cancel(true) + val waitEvents = queue.getWaitingEvents + if (waitEvents.nonEmpty) { + waitEvents.foreach { + case job: Job => + job.onFailure("Your job will be marked as canceled 
because the consumer be killed", null) + case _ => + } + } + + this.runningJobs.foreach { job => + if (job != null && !job.isCompleted) { + job.onFailure("Your job will be marked as canceled because the consumer be killed", null) + } + } super.shutdown() } diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsUtils.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsUtils.java index f3b8f44865..68469cb520 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsUtils.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/excel/XlsUtils.java @@ -61,6 +61,7 @@ public static String excelToCsv( throws Exception { String hdfsPath = "/tmp/" + StorageUtils.getJvmUser() + "/" + System.currentTimeMillis() + ".csv"; + LOG.info("The excel to csv with hdfs path:" + hdfsPath); ExcelXlsReader xlsReader = new ExcelXlsReader(); RowToCsvDeal rowToCsvDeal = new RowToCsvDeal(); OutputStream out = null; diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageErrorCode.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageErrorCode.java index 3c82ceb523..a8123a1f92 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageErrorCode.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/exception/StorageErrorCode.java @@ -20,7 +20,9 @@ public enum StorageErrorCode { /** */ - FS_NOT_INIT(53001, "please init first(请先初始化)"); + FS_NOT_INIT(53001, "please init first(请先初始化)"), + INCONSISTENT_DATA(53001, "Inconsistent row data read,read %s,need rowLen %s"), + FS_OOM(53002, "OOM occurred while reading the file"); StorageErrorCode(int errorCode, String message) { this.code = errorCode; diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/FileSystem.java 
b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/FileSystem.java index c19213a9cc..4bb7cb33b4 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/FileSystem.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/FileSystem.java @@ -24,8 +24,13 @@ import java.io.File; import java.io.IOException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + public abstract class FileSystem implements Fs { + private static final Logger LOG = LoggerFactory.getLogger(FileSystem.class); + protected String user; private String defaultFilePerm = "rwxr-----"; // 740 private String defaultFolderPerm = "rwxr-x---"; // 750 @@ -94,6 +99,7 @@ protected FsPath getParentPath(String path) { } else { parentPath = path.substring(0, path.lastIndexOf("/")); } + LOG.info("Get parent path:" + parentPath); return new FsPath(parentPath); } diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/LocalFileSystem.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/LocalFileSystem.java index ce2ee43b7e..2df547f10e 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/LocalFileSystem.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/fs/impl/LocalFileSystem.java @@ -75,12 +75,14 @@ public String listRoot() throws IOException { @Override public long getTotalSpace(FsPath dest) throws IOException { String path = dest.getPath(); + LOG.info("Get total space with path:" + path); return new File(path).getTotalSpace(); } @Override public long getFreeSpace(FsPath dest) throws IOException { String path = dest.getPath(); + LOG.info("Get free space with path:" + path); return new File(path).getFreeSpace(); } @@ -117,6 +119,7 @@ public boolean setOwner(FsPath dest, String user, String group) throws IOExcepti @Override public boolean setOwner(FsPath dest, String user) throws IOException { 
+ LOG.info("Set owner with path:" + dest.getPath() + "and user:" + user); if (!StorageUtils.isIOProxy()) { LOG.info("io not proxy, setOwner skip"); return true; @@ -133,6 +136,7 @@ public boolean setOwner(FsPath dest, String user) throws IOException { @Override public boolean setGroup(FsPath dest, String group) throws IOException { + LOG.info("Set group with path:" + dest.getPath() + "and group:" + user); if (!StorageUtils.isIOProxy()) { LOG.info("io not proxy, setGroup skip"); return true; @@ -155,6 +159,7 @@ public boolean mkdir(FsPath dest) throws IOException { @Override public boolean mkdirs(FsPath dest) throws IOException { String path = dest.getPath(); + LOG.info("Try to mkdirs with path:" + path); File file = new File(path); // Create parent directories one by one and set their permissions to rwxrwxrwx. Stack dirsToMake = new Stack(); @@ -182,6 +187,7 @@ public boolean mkdirs(FsPath dest) throws IOException { } public boolean canMkdir(FsPath destParentDir) throws IOException { + LOG.info("Try to check if the directory can be created with path:" + destParentDir.getPath()); if (!StorageUtils.isIOProxy()) { LOG.debug("io not proxy, not check owner, just check if have write permission "); return this.canWrite(destParentDir); @@ -203,6 +209,7 @@ public boolean canMkdir(FsPath destParentDir) throws IOException { @Override public boolean copy(String origin, String dest) throws IOException { File file = new File(dest); + LOG.info("Try to copy file from:" + origin + " to dest:" + dest); if (!isOwner(file.getParent())) { throw new IOException("you have on permission to create file " + dest); } @@ -225,6 +232,7 @@ public boolean copy(String origin, String dest) throws IOException { @Override public boolean setPermission(FsPath dest, String permission) throws IOException { + LOG.info("Try to set permission dest with path:" + dest.getPath()); if (!StorageUtils.isIOProxy()) { LOG.info("io not proxy, setPermission as parent."); try { @@ -251,6 +259,7 @@ public boolean 
setPermission(FsPath dest, String permission) throws IOException public FsPathListWithError listPathWithError(FsPath path) throws IOException { File file = new File(path.getPath()); File[] files = file.listFiles(); + LOG.info("Try to list path:" + path.getPath() + " with error msg"); if (files != null) { List rtn = new ArrayList(); String message = ""; @@ -294,6 +303,7 @@ public void init(Map properties) throws IOException { String groupInfo; try { groupInfo = Utils.exec(new String[] {"id", user}); + LOG.info("Get groupinfo:" + groupInfo + " with shell command: id " + user); } catch (RuntimeException e) { group = user; return; @@ -322,7 +332,7 @@ public FsPath get(String dest) throws IOException { } else { fsPath = new FsPath(dest); } - + LOG.info("Try to get FsPath with path:" + fsPath.getPath()); PosixFileAttributes attr = null; try { attr = Files.readAttributes(Paths.get(fsPath.getPath()), PosixFileAttributes.class); @@ -365,7 +375,7 @@ public OutputStream write(FsPath dest, boolean overwrite) throws IOException { @Override public boolean create(String dest) throws IOException { - + LOG.info("try to create file with path:" + dest); File file = new File(dest); if (!isOwner(file.getParent())) { throw new IOException("you have on permission to create file " + dest); @@ -391,6 +401,7 @@ public boolean create(String dest) throws IOException { public List list(FsPath path) throws IOException { File file = new File(path.getPath()); File[] files = file.listFiles(); + LOG.info("Try to get file list with path:" + path.getPath()); if (files != null) { List rtn = new ArrayList(); for (File f : files) { diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/conf/LinkisStorageConf.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/conf/LinkisStorageConf.scala index bf03c78754..1d32d97fef 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/conf/LinkisStorageConf.scala +++ 
b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/conf/LinkisStorageConf.scala @@ -60,4 +60,5 @@ object LinkisStorageConf { fileTypeArr } + val LINKIS_RESULT_ENABLE_NULL = CommonVars("linkis.resultset.enable.null.replace", true).getValue } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVWriter.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVWriter.scala index f9b811b6a6..63dce31ca4 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVWriter.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/csv/StorageCSVWriter.scala @@ -36,6 +36,8 @@ class StorageCSVWriter( with Logging { private val delimiter = separator match { + // Compatible with possible missing escape characters + case "t" => '\t' case separ if StringUtils.isNotEmpty(separ) => separ case _ => '\t' } @@ -50,14 +52,21 @@ class StorageCSVWriter( private def compact(row: Array[String]): String = { val quotationMarks: String = "\"" + val dealNewlineSymbolMarks: String = "\n" + def decorateValue(v: String): String = { if (StringUtils.isBlank(v)) v else { + var res = v if (quoteRetouchEnable) { - s"$quotationMarks${v.replaceAll(quotationMarks, "")}$quotationMarks" - } else v + res = s"$quotationMarks${v.replaceAll(quotationMarks, "")}$quotationMarks" + } + res = res.replaceAll(dealNewlineSymbolMarks, " ") + logger.debug("decorateValue with input:" + v + " output:" + res) + res } } + if (logger.isDebugEnabled()) { logger.debug("delimiter:" + delimiter.toString) } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/DataType.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/DataType.scala index 55c82abd38..61d5fb7b3d 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/DataType.scala +++ 
b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/DataType.scala @@ -18,15 +18,15 @@ package org.apache.linkis.storage.domain import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.storage.conf.LinkisStorageConf import java.math.{BigDecimal => JavaBigDecimal} import java.sql.{Date, Timestamp} object DataType extends Logging { - val NULL_VALUE = "NULL" val LOWCASE_NULL_VALUE = "null" - // TODO Change to fine-grained regular expressions(改为精细化正则表达式) + val DECIMAL_REGEX = "^decimal\\(\\d*\\,\\d*\\)".r.unanchored val SHORT_REGEX = "^short.*".r.unanchored @@ -70,39 +70,53 @@ object DataType extends Logging { case _ => StringType } - def toValue(dataType: DataType, value: String): Any = Utils.tryCatch(dataType match { - case NullType => null - case StringType | CharType | VarcharType | StructType | ListType | ArrayType | MapType => - value - case BooleanType => if (isNumberNull(value)) null else value.toBoolean - case ShortIntType => if (isNumberNull(value)) null else value.toShort - case IntType => if (isNumberNull(value)) null else value.toInt - case LongType | BigIntType => if (isNumberNull(value)) null else value.toLong - case FloatType => if (isNumberNull(value)) null else value.toFloat - case DoubleType => if (isNumberNull(value)) null else value.toDouble - case DecimalType => if (isNumberNull(value)) null else new JavaBigDecimal(value) - case DateType => if (isNumberNull(value)) null else Date.valueOf(value) - case TimestampType => - if (isNumberNull(value)) null else Timestamp.valueOf(value).toString.stripSuffix(".0") - case BinaryType => if (isNull(value)) null else value.getBytes() - case _ => value - }) { t => - logger.debug(s"Failed to $value switch to dataType:", t) - value + def toValue(dataType: DataType, value: String): Any = { + var newValue: String = value + if (isLinkisNull(value)) { + if (!LinkisStorageConf.LINKIS_RESULT_ENABLE_NULL) { + return null + } else { + newValue = Dolphin.NULL + } + } + 
Utils.tryCatch(dataType match { + case NullType => null + case StringType | CharType | VarcharType | StructType | ListType | ArrayType | MapType => + newValue + case BooleanType => if (isNumberNull(newValue)) null else newValue.toBoolean + case ShortIntType => if (isNumberNull(newValue)) null else newValue.toShort + case IntType => if (isNumberNull(newValue)) null else newValue.toInt + case LongType | BigIntType => if (isNumberNull(newValue)) null else newValue.toLong + case FloatType => if (isNumberNull(newValue)) null else newValue.toFloat + case DoubleType => if (isNumberNull(newValue)) null else newValue.toDouble + case DecimalType => if (isNumberNull(newValue)) null else new JavaBigDecimal(newValue) + case DateType => if (isNumberNull(newValue)) null else Date.valueOf(newValue) + case TimestampType => + if (isNumberNull(newValue)) null else Timestamp.valueOf(newValue).toString.stripSuffix(".0") + case BinaryType => if (isNull(newValue)) null else newValue.getBytes() + case _ => newValue + }) { t => + logger.debug(s"Failed to $newValue switch to dataType:", t) + newValue + } + } + + def isLinkisNull(value: String): Boolean = { + if (value == null || value == Dolphin.LINKIS_NULL) true else false } def isNull(value: String): Boolean = - if (value == null || value == NULL_VALUE || value.trim == "") true else false + if (value == null || value == Dolphin.NULL || value.trim == "") true else false def isNumberNull(value: String): Boolean = - if (null == value || NULL_VALUE.equalsIgnoreCase(value) || value.trim == "") { + if (null == value || Dolphin.NULL.equalsIgnoreCase(value) || value.trim == "") { true } else { false } def valueToString(value: Any): String = { - if (null == value) return LOWCASE_NULL_VALUE + if (null == value) return null value match { case javaDecimal: JavaBigDecimal => javaDecimal.toPlainString diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/Dolphin.scala 
b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/Dolphin.scala index 378c2c2ecb..8e7e7d6131 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/Dolphin.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/domain/Dolphin.scala @@ -39,7 +39,10 @@ object Dolphin extends Logging { val COL_SPLIT_LEN = COL_SPLIT_BYTES.length val NULL = "NULL" - val NULL_BYTES = "NULL".getBytes("utf-8") + val NULL_BYTES = NULL.getBytes("utf-8") + + val LINKIS_NULL = "LINKIS_NULL" + val LINKIS_NULL_BYTES = LINKIS_NULL.getBytes("utf-8") val INT_LEN = 10 @@ -59,6 +62,14 @@ object Dolphin extends Logging { def getString(bytes: Array[Byte], start: Int, len: Int): String = new String(bytes, start, len, Dolphin.CHAR_SET) + def toStringValue(value: String): String = { + if (LINKIS_NULL.equals(value)) { + NULL + } else { + value + } + } + /** * Read an integer value that converts the array to a byte of length 10 bytes * 读取整数值,该值为将数组转换为10字节长度的byte diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/DefaultResultSetFactory.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/DefaultResultSetFactory.scala index 38973ae1ab..d4836731db 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/DefaultResultSetFactory.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/DefaultResultSetFactory.scala @@ -108,7 +108,7 @@ class DefaultResultSetFactory extends ResultSetFactory with Logging { if (StringUtils.isEmpty(resultSetType)) { throw new StorageWarnException( THE_FILE_IS_EMPTY.getErrorCode, - s"The file (${fsPath.getPath}) is empty(文件(${fsPath.getPath}) 为空)" + MessageFormat.format(THE_FILE_IS_EMPTY.getErrorDesc, fsPath.getPath) ) } Utils.tryQuietly(inputStream.close()) diff --git 
a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetReader.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetReader.scala index 663e379b5b..e61cf36b3d 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetReader.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/ResultSetReader.scala @@ -19,6 +19,7 @@ package org.apache.linkis.storage.resultset import org.apache.linkis.common.io.{FsPath, MetaData, Record} import org.apache.linkis.common.io.resultset.{ResultSet, ResultSetReader} +import org.apache.linkis.common.utils.Logging import org.apache.linkis.storage.FSFactory import org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.TABLE_ARE_NOT_SUPPORTED import org.apache.linkis.storage.exception.StorageErrorException @@ -26,7 +27,7 @@ import org.apache.linkis.storage.resultset.table.{TableMetaData, TableRecord, Ta import java.io.InputStream -object ResultSetReader { +object ResultSetReader extends Logging { def getResultSetReader[K <: MetaData, V <: Record]( resultSet: ResultSet[K, V], @@ -83,6 +84,7 @@ object ResultSetReader { ) } val fs = FSFactory.getFs(resPath) + logger.info("Try to init Fs with path:" + resPath.getPath) fs.init(null) ResultSetReader.getResultSetReader(resultSet.asInstanceOf[TableResultSet], fs.read(resPath)) } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSet.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSet.scala index fc303fbb5c..7b3aca62d9 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSet.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSet.scala @@ -34,6 +34,7 @@ abstract class StorageResultSet[K <: MetaData, V <: Record] 
extends ResultSet[K, } else { parentDir.toPath + "/" + fileName + Dolphin.DOLPHIN_FILE_SUFFIX } + logger.info(s"Get result set path:${path}") new FsPath(path) } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetReader.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetReader.scala index 3f49faf3ed..80db410746 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetReader.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetReader.scala @@ -21,7 +21,11 @@ import org.apache.linkis.common.io.{Fs, MetaData, Record} import org.apache.linkis.common.io.resultset.{ResultSet, ResultSetReader} import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.storage.domain.Dolphin -import org.apache.linkis.storage.exception.StorageWarnException +import org.apache.linkis.storage.exception.{ + StorageErrorCode, + StorageErrorException, + StorageWarnException +} import org.apache.linkis.storage.utils.StorageUtils import java.io.{ByteArrayInputStream, InputStream, IOException} @@ -43,7 +47,6 @@ class StorageResultSetReader[K <: MetaData, V <: Record]( private var fs: Fs = _ private val READ_CACHE = 1024 - private val bytes = new Array[Byte](READ_CACHE) def this(resultSet: ResultSet[K, V], value: String) = { this(resultSet, new ByteArrayInputStream(value.getBytes(Dolphin.CHAR_SET))) @@ -74,24 +77,27 @@ class StorageResultSetReader[K <: MetaData, V <: Record]( case t: Throwable => throw t } - val rowBuffer = ArrayBuffer[Byte]() - var len = 0 - - // Read the entire line, except for the data of the line length(读取整行,除了行长的数据) - while (rowLen > 0 && len >= 0) { - if (rowLen > READ_CACHE) { - len = StorageUtils.readBytes(inputStream, bytes, READ_CACHE) - } else { - len = StorageUtils.readBytes(inputStream, bytes, rowLen) - } - - if (len > 0) { - rowLen 
-= len - rowBuffer ++= bytes.slice(0, len) - } + var bytes: Array[Byte] = null + try { + bytes = new Array[Byte](rowLen) + } catch { + case e: OutOfMemoryError => + logger.error("Result set read oom, read size {} Byte", rowLen) + throw new StorageErrorException( + StorageErrorCode.FS_OOM.getCode, + StorageErrorCode.FS_OOM.getMessage, + e + ) + } + val len = StorageUtils.readBytes(inputStream, bytes, rowLen) + if (len != rowLen) { + throw new StorageErrorException( + StorageErrorCode.INCONSISTENT_DATA.getCode, + String.format(StorageErrorCode.INCONSISTENT_DATA.getMessage, len.toString, rowLen.toString) + ) } rowCount = rowCount + 1 - rowBuffer.toArray + bytes } @scala.throws[IOException] diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetWriter.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetWriter.scala index 9c7947272c..17106dca67 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetWriter.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/StorageResultSetWriter.scala @@ -86,6 +86,7 @@ class StorageResultSetWriter[K <: MetaData, V <: Record]( WRITER_LOCK_CREATE.synchronized { if (!fileCreated) { if (storePath != null && outputStream == null) { + logger.info(s"Try to create a new file:${storePath}, with proxy user:${proxyUser}") fs = FSFactory.getFsByProxyUser(storePath, proxyUser) fs.init(null) FileSystemUtils.createNewFile(storePath, proxyUser, true) diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableRecord.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableRecord.scala index d8e0560964..64d2b18b9c 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableRecord.scala +++ 
b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableRecord.scala @@ -27,10 +27,4 @@ class TableRecord(val row: Array[Any]) extends ResultRecord { new TableRecord(row) } - def tableRecordToString(nullValue: String = "NULL"): Array[String] = { - row.map { col => - StorageUtils.colToString(col, nullValue) - } - } - } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultDeserializer.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultDeserializer.scala index 40c4e031f7..32ea4f5e22 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultDeserializer.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultDeserializer.scala @@ -47,13 +47,13 @@ class TableResultDeserializer extends ResultDeserializer[TableMetaData, TableRec val columns = new ArrayBuffer[Column]() for (i <- 0 until (colArray.length, 3)) { var len = colArray(i).toInt - val colName = Dolphin.getString(bytes, index, len) + val colName = Dolphin.toStringValue(Dolphin.getString(bytes, index, len)) index += len len = colArray(i + 1).toInt - val colType = Dolphin.getString(bytes, index, len) + val colType = Dolphin.toStringValue(Dolphin.getString(bytes, index, len)) index += len len = colArray(i + 2).toInt - val colComment = Dolphin.getString(bytes, index, len) + val colComment = Dolphin.toStringValue(Dolphin.getString(bytes, index, len)) index += len columns += Column(colName, colType, colComment) } diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultSerializer.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultSerializer.scala index 94b6cb4c03..5d1738a346 100644 --- 
a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultSerializer.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/resultset/table/TableResultSerializer.scala @@ -19,6 +19,7 @@ package org.apache.linkis.storage.resultset.table import org.apache.linkis.common.io.{MetaData, Record} import org.apache.linkis.common.io.resultset.ResultSerializer +import org.apache.linkis.storage.conf.LinkisStorageConf import org.apache.linkis.storage.domain.Dolphin import scala.collection.mutable.ArrayBuffer @@ -45,14 +46,20 @@ class TableResultSerializer extends ResultSerializer { * @param line */ def lineToBytes(line: Array[Any]): Array[Byte] = { - // Data cache(数据缓存) val dataBytes = ArrayBuffer[Array[Byte]]() - // Column cache(列缓存) val colIndex = ArrayBuffer[Array[Byte]]() var colByteLen = 0 var length = 0 line.foreach { data => - val bytes = if (data == null) Dolphin.NULL_BYTES else Dolphin.getBytes(data) + val bytes = if (data == null) { + if (!LinkisStorageConf.LINKIS_RESULT_ENABLE_NULL) { + Dolphin.LINKIS_NULL_BYTES + } else { + Dolphin.NULL_BYTES + } + } else { + Dolphin.getBytes(data) + } dataBytes += bytes val colBytes = Dolphin.getBytes(bytes.length) colIndex += colBytes += Dolphin.COL_SPLIT_BYTES diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSource.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSource.scala index 7a9fa4f04c..b6830a13b4 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSource.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/FileSource.scala @@ -18,6 +18,7 @@ package org.apache.linkis.storage.source import org.apache.linkis.common.io._ +import org.apache.linkis.common.utils.Logging import org.apache.linkis.storage.conf.LinkisStorageConf import 
org.apache.linkis.storage.errorcode.LinkisStorageErrorCodeSummary.UNSUPPORTED_OPEN_FILE_TYPE import org.apache.linkis.storage.exception.StorageErrorException @@ -55,7 +56,7 @@ trait FileSource extends Closeable { def getFileSplits: Array[FileSplit] } -object FileSource { +object FileSource extends Logging { private val fileType = LinkisStorageConf.getFileTypeArr private val suffixPredicate = (path: String, suffix: String) => path.endsWith(s".$suffix") @@ -124,6 +125,7 @@ object FileSource { } private def createResultSetFileSplit(fsPath: FsPath, fs: Fs): FileSplit = { + logger.info(s"try create result set file split with path:${fsPath.getPath}") val resultset = ResultSetFactory.getInstance.getResultSetByPath(fsPath, fs) val resultsetReader = ResultSetReader.getResultSetReader(resultset, fs.read(fsPath)) new FileSplit(resultsetReader, resultset.resultSetType()) diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/ResultsetFileSource.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/ResultsetFileSource.scala index 7b387e9f19..adbb596aa2 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/ResultsetFileSource.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/source/ResultsetFileSource.scala @@ -17,6 +17,7 @@ package org.apache.linkis.storage.source +import org.apache.linkis.storage.domain.Dolphin import org.apache.linkis.storage.resultset.table.TableRecord import org.apache.linkis.storage.utils.StorageUtils @@ -24,11 +25,27 @@ class ResultsetFileSource(fileSplits: Array[FileSplit]) extends AbstractFileSour shuffle({ case t: TableRecord => - new TableRecord(t.row.map { - case null | "NULL" => getParams.getOrDefault("nullValue", "NULL") - case "" => getParams.getOrDefault("nullValue", "") - case value: Double => StorageUtils.doubleToString(value) - case r => r + new TableRecord(t.row.map { rvalue => + { + rvalue match { + case 
null | "NULL" => + val nullValue = getParams.getOrDefault("nullValue", "NULL") + if (nullValue.equals(Dolphin.LINKIS_NULL)) { + rvalue + } else { + nullValue + } + case "" => + val nullValue = getParams.getOrDefault("nullValue", "") + if (nullValue.equals(Dolphin.LINKIS_NULL)) { + "" + } else { + nullValue + } + case value: Double => StorageUtils.doubleToString(value) + case _ => rvalue + } + } }) case record => record }) diff --git a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageUtils.scala b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageUtils.scala index 90eb319fa0..7a5b134749 100644 --- a/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageUtils.scala +++ b/linkis-commons/linkis-storage/src/main/scala/org/apache/linkis/storage/utils/StorageUtils.scala @@ -46,7 +46,11 @@ object StorageUtils extends Logging { nf.setMaximumFractionDigits(StorageConfiguration.DOUBLE_FRACTION_LEN.getValue) def doubleToString(value: Double): String = { - nf.format(value) + if (value.isNaN) { + "NaN" + } else { + nf.format(value) + } } def loadClass[T](classStr: String, op: T => String): Map[String, T] = { @@ -211,17 +215,6 @@ object StorageUtils extends Logging { readLen } - def colToString(col: Any, nullValue: String = "NULL"): String = { - if (null == col) nullValue - else { - col match { - case value: Double => doubleToString(value) - case "NULL" | "" => nullValue - case _ => col.toString - } - } - } - def isIOProxy(): Boolean = { StorageConfiguration.ENABLE_IO_PROXY.getValue } diff --git a/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/domain/DataTypeTest.scala b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/domain/DataTypeTest.scala index 3bfc35b17c..5537794840 100644 --- a/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/domain/DataTypeTest.scala +++ 
b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/domain/DataTypeTest.scala @@ -25,7 +25,7 @@ class DataTypeTest { @DisplayName("constTest") def constTest(): Unit = { - val nullvalue = DataType.NULL_VALUE + val nullvalue = Dolphin.NULL val lowcasenullvalue = DataType.LOWCASE_NULL_VALUE Assertions.assertEquals("NULL", nullvalue) @@ -60,4 +60,12 @@ class DataTypeTest { } + @Test + @DisplayName("toValueTest") + def toValueTest(): Unit = { + val dateType = DataType.toDataType("double") + val str = DataType.toValue(dateType, "NaN") + Assertions.assertNotNull(str) + } + } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/JobData.java b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageUtilsTest.scala similarity index 72% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/JobData.java rename to linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageUtilsTest.scala index 10d55e6541..6ae15782ee 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/JobData.java +++ b/linkis-commons/linkis-storage/src/test/scala/org/apache/linkis/storage/utils/StorageUtilsTest.scala @@ -15,20 +15,18 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.job; +package org.apache.linkis.storage.utils -public interface JobData { - JobStatus getJobStatus(); +import org.junit.jupiter.api.{Assertions, DisplayName, Test} - String getJobID(); +class StorageUtilsTest { - String getUser(); + @Test + @DisplayName("doubleToStringTest") + def doubleToStringTest(): Unit = { + val str = StorageUtils.doubleToString(Double.NaN) + Assertions.assertEquals("NaN", str) - String getMessage(); + } - void setMessage(String message); - - Exception getException(); - - void setException(Exception e); } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/pom.xml b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/pom.xml deleted file mode 100644 index 980141f9a3..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/pom.xml +++ /dev/null @@ -1,87 +0,0 @@ - - - - 4.0.0 - - - org.apache.linkis - linkis-cli - ${revision} - - linkis-cli-application - jar - - - - org.apache.linkis - linkis-cli-common - ${project.version} - - - org.apache.linkis - linkis-cli-core - ${project.version} - - - org.apache.linkis - linkis-computation-client - ${project.version} - - - org.apache.linkis - linkis-gateway-httpclient-support - ${project.version} - - - - - - - - org.apache.maven.plugins - maven-assembly-plugin - false - - false - out - false - false - - /src/main/assembly/distribution.xml - - - - - make-assembly - - single - - package - - - /src/main/assembly/distribution.xml - - - - - - - - - - diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/LinkisClientApplication.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/LinkisClientApplication.java deleted file mode 100644 index 0c6fc1ec73..0000000000 --- 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/LinkisClientApplication.java +++ /dev/null @@ -1,341 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application; - -import org.apache.linkis.cli.application.constants.AppConstants; -import org.apache.linkis.cli.application.constants.AppKeys; -import org.apache.linkis.cli.application.data.FinishedData; -import org.apache.linkis.cli.application.data.PreparedData; -import org.apache.linkis.cli.application.data.ProcessedData; -import org.apache.linkis.cli.application.interactor.command.LinkisCmdType; -import org.apache.linkis.cli.application.interactor.command.template.UniversalCmdTemplate; -import org.apache.linkis.cli.application.operator.ujes.LinkisOperatorBuilder; -import org.apache.linkis.cli.application.suite.ExecutionSuite; -import org.apache.linkis.cli.application.suite.ExecutionSuiteFactory; -import org.apache.linkis.cli.application.utils.Utils; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.command.Params; -import org.apache.linkis.cli.common.entity.execution.Execution; -import 
org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.properties.ClientProperties; -import org.apache.linkis.cli.common.entity.result.ExecutionResult; -import org.apache.linkis.cli.common.entity.result.ResultHandler; -import org.apache.linkis.cli.common.entity.var.VarAccess; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.common.exception.handler.ExceptionHandler; -import org.apache.linkis.cli.core.constants.CommonConstants; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.PropsException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.exception.handler.CommandExceptionHandler; -import org.apache.linkis.cli.core.exception.handler.DefaultExceptionHandler; -import org.apache.linkis.cli.core.interactor.command.CmdTemplateFactory; -import org.apache.linkis.cli.core.interactor.command.fitter.SingleTplFitter; -import org.apache.linkis.cli.core.interactor.command.parser.Parser; -import org.apache.linkis.cli.core.interactor.command.parser.SingleCmdParser; -import org.apache.linkis.cli.core.interactor.command.parser.result.ParseResult; -import org.apache.linkis.cli.core.interactor.properties.PropertiesLoader; -import org.apache.linkis.cli.core.interactor.properties.PropsFilesScanner; -import org.apache.linkis.cli.core.interactor.properties.StdPropsLoader; -import org.apache.linkis.cli.core.interactor.properties.reader.PropertiesReader; -import org.apache.linkis.cli.core.interactor.properties.reader.PropsFileReader; -import org.apache.linkis.cli.core.interactor.properties.reader.SysEnvReader; -import org.apache.linkis.cli.core.interactor.properties.reader.SysPropsReader; -import org.apache.linkis.cli.core.interactor.result.DefaultResultHandler; -import 
org.apache.linkis.cli.core.interactor.result.ExecutionResultImpl; -import org.apache.linkis.cli.core.interactor.result.ExecutionStatusEnum; -import org.apache.linkis.cli.core.interactor.validate.ParsedTplValidator; -import org.apache.linkis.cli.core.interactor.var.StdVarAccess; -import org.apache.linkis.cli.core.interactor.var.SysVarAccess; -import org.apache.linkis.cli.core.operator.JobOperatorBuilder; -import org.apache.linkis.cli.core.operator.JobOperatorFactory; -import org.apache.linkis.cli.core.present.PresentModeImpl; -import org.apache.linkis.cli.core.present.display.DisplayOperFactory; -import org.apache.linkis.cli.core.present.display.PlainTextFileWriter; -import org.apache.linkis.cli.core.present.display.StdOutWriter; -import org.apache.linkis.cli.core.utils.LogUtils; -import org.apache.linkis.cli.core.utils.SchedulerUtils; - -import org.apache.commons.lang3.StringUtils; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class LinkisClientApplication { - private static Logger logger = LoggerFactory.getLogger(LinkisClientApplication.class); - - /** - * generate Templates load env variables TODO: load version info - * - * @return PreparedData - */ - private static PreparedData prepare() throws LinkisClientRuntimeException { - /* - generate template - */ - CmdTemplate template = new UniversalCmdTemplate(); - CmdTemplateFactory.register(template); - /* - load env variables - */ - Map propertiesMap = new HashMap<>(); - PropertiesLoader loader = - new StdPropsLoader() - .addPropertiesReader(new SysPropsReader()) - .addPropertiesReader(new SysEnvReader()); - for (ClientProperties properties : loader.loadProperties()) { - propertiesMap.put(properties.getPropsId(), properties); - } - - return new PreparedData(propertiesMap); - } - - /** - * parse user input load user config load default config check if all inputs are ok - * - * @param args user input arguments - * 
@return ProcessedData - */ - private static ProcessedData processInput(String[] args, PreparedData preparedData) - throws Exception { - - if (preparedData == null) { - return null; - } - - /* - user input - */ - CmdTemplate template = CmdTemplateFactory.getTemplateCopy(LinkisCmdType.UNIVERSAL); - Parser parser = - new SingleCmdParser() - .setMapper(null) - .setTemplate(template) - .setFitter(new SingleTplFitter()); - - ParseResult result = parser.parse(args); - - ParsedTplValidator parsedTplValidator = new ParsedTplValidator(); - parsedTplValidator.doValidation(result.getParsedTemplate()); - - Params params = result.getParams(); - logger.debug("==========params============\n" + Utils.GSON.toJson(params)); - - /* - VarAccess for sys_prop, sys_env - */ - Map propertiesMap = preparedData.getPropertiesMap(); - VarAccess sysVarAccess = - new SysVarAccess() - .setSysProp(propertiesMap.get(CommonConstants.SYSTEM_PROPERTIES_IDENTIFIER)) - .setSysEnv(propertiesMap.get(CommonConstants.SYSTEM_ENV_IDENTIFIER)); - logger.debug("==========sys_var============\n" + Utils.GSON.toJson(sysVarAccess)); - - LogUtils.getInformationLogger() - .info( - "LogFile path: " - + sysVarAccess.getVar(String.class, AppKeys.LOG_PATH_KEY) - + "/" - + sysVarAccess.getVar(String.class, AppKeys.LOG_FILE_KEY)); - /* - default config, -Dconf.root & -Dconf.file specifies config path - */ - // scan config files given root path - String configPath = sysVarAccess.getVar(String.class, AppKeys.CLIENT_CONFIG_ROOT_KEY); - String defaultConfFileName = - sysVarAccess.getVarOrDefault( - String.class, AppKeys.DEFAULT_CONFIG_FILE_NAME_KEY, AppConstants.DEFAULT_CONFIG_NAME); - if (StringUtils.isBlank(configPath)) { - throw new PropsException( - "PRP0007", - ErrorLevel.ERROR, - CommonErrMsg.PropsLoaderErr, - "configuration root path specified by env variable: " - + AppKeys.CLIENT_CONFIG_ROOT_KEY - + " is empty."); - } - - List readersList = - new PropsFilesScanner().getPropsReaders(configPath); // +1 user config - /* - 
user defined config - */ - String userConfPath = null; - if (params.containsParam(AppKeys.LINKIS_CLIENT_USER_CONFIG)) { - userConfPath = - (String) params.getParamItemMap().get(AppKeys.LINKIS_CLIENT_USER_CONFIG).getValue(); - } - if (StringUtils.isNotBlank(userConfPath)) { - PropertiesReader reader = - new PropsFileReader() - .setPropsId(AppKeys.LINKIS_CLIENT_USER_CONFIG) - .setPropsPath(userConfPath); - readersList.add(reader); - } else { - LogUtils.getInformationLogger() - .info("User does not provide usr-configuration file. Will use default config"); - } - /* - load properties - */ - PropertiesLoader loader = - new StdPropsLoader() - .addPropertiesReaders(readersList.toArray(new PropertiesReader[readersList.size()])); - ClientProperties[] loaderResult = loader.loadProperties(); - for (ClientProperties properties : loaderResult) { - if (StringUtils.equals(properties.getPropsId(), AppKeys.LINKIS_CLIENT_USER_CONFIG)) { - for (Map.Entry prop : properties.entrySet()) { - if (StringUtils.startsWith( - (String) prop.getKey(), AppKeys.LINKIS_CLIENT_NONCUSTOMIZABLE)) { - throw new PropsException( - "PRP0007", - ErrorLevel.ERROR, - CommonErrMsg.PropsLoaderErr, - "User cannot specify non-customizable configuration: " + prop.getKey()); - } - } - } - propertiesMap.put(properties.getPropsId(), properties); - } - - /* - VarAccess for cmd, config - */ - VarAccess stdVarAccess = - new StdVarAccess() - .setCmdParams(params) - .setUserConf(propertiesMap.get(AppKeys.LINKIS_CLIENT_USER_CONFIG)) - .setDefaultConf(propertiesMap.get(defaultConfFileName)) - .init(); - logger.info("==========std_var============\n" + Utils.GSON.toJson(stdVarAccess)); - - /* - Prepare operator for accessing linkis - */ - JobOperatorBuilder builder = - new LinkisOperatorBuilder().setStdVarAccess(stdVarAccess).setSysVarAccess(sysVarAccess); - - JobOperatorFactory.register(AppKeys.REUSABLE_UJES_CLIENT, builder); - /* - Prepare DisplayOperator - */ - DisplayOperFactory.register(PresentModeImpl.STDOUT, new 
StdOutWriter()); - DisplayOperFactory.register(PresentModeImpl.TEXT_FILE, new PlainTextFileWriter()); - - return new ProcessedData( - AppConstants.DUMMY_CID, params.getCmdType(), stdVarAccess, sysVarAccess); - } - - /** - * submit job display result - * - * @return FinishedData - */ - private static FinishedData exec(ProcessedData data) throws Exception { - if (data == null) { - return null; - } - - ExecutionSuite suite = - ExecutionSuiteFactory.getSuite( - data.getCmdType(), data.getStdVarAccess(), data.getSysVarAccess()); - - /* - Get everything - */ - Map jobs = suite.getJobs(); - ResultHandler[] resultHandlers = suite.getResultHandlers(); - Execution execution = suite.getExecution(); - - /* - execute - */ - final Map jobsToKill = jobs; - Thread hook = new Thread(() -> execution.terminate(jobsToKill)); - if (jobsToKill != null && jobsToKill.size() != 0) { - Runtime.getRuntime().addShutdownHook(hook); - } - ExecutionResult result = execution.execute(jobs); - - Runtime.getRuntime().removeShutdownHook(hook); - - return new FinishedData(result, resultHandlers); - } - - public static void main(String[] args) { - - ExceptionHandler handler = new DefaultExceptionHandler(); - ProcessedData processedData = null; - FinishedData finishedData = null; - ExecutionResult executionResult = new ExecutionResultImpl(null, ExecutionStatusEnum.UNDEFINED); - PreparedData preparedData = null; - - try { - preparedData = prepare(); - } catch (Exception e) { - handler.handle(e); - executionResult.setExecutionStatus(ExecutionStatusEnum.FAILED); - } - - try { - processedData = processInput(args, preparedData); - } catch (CommandException ce) { - new CommandExceptionHandler().handle(ce); - executionResult.setExecutionStatus(ExecutionStatusEnum.FAILED); - } catch (Exception e) { - handler.handle(e); - executionResult.setExecutionStatus(ExecutionStatusEnum.FAILED); - } - - try { - finishedData = exec(processedData); - } catch (Exception e) { - handler.handle(e); - 
executionResult.setExecutionStatus(ExecutionStatusEnum.FAILED); - } - - if (finishedData != null) { - executionResult = finishedData.getExecutionResult(); - if (executionResult == null) { - executionResult = new ExecutionResultImpl(null, ExecutionStatusEnum.UNDEFINED); - } - if (executionResult.getException() != null) { - handler.handle(executionResult.getException()); - new DefaultResultHandler().process(executionResult); - } else { - if (finishedData.getResultHandlers() != null) { - for (ResultHandler resultHandler : finishedData.getResultHandlers()) { - if (resultHandler != null) { - resultHandler.process(executionResult); - } - } - } - } - } else { - executionResult.setExecutionStatus(ExecutionStatusEnum.FAILED); - new DefaultResultHandler().process(executionResult); - } - - SchedulerUtils.shutDown(); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/data/FinishedData.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/data/FinishedData.java deleted file mode 100644 index 4438cf301b..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/data/FinishedData.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.data; - -import org.apache.linkis.cli.common.entity.result.ExecutionResult; -import org.apache.linkis.cli.common.entity.result.ResultHandler; - -public class FinishedData { - ExecutionResult executionResult; - ResultHandler[] resultHandlers; - - public FinishedData(ExecutionResult executionResult, ResultHandler[] resultHandlers) { - this.executionResult = executionResult; - this.resultHandlers = resultHandlers; - } - - public ExecutionResult getExecutionResult() { - return executionResult; - } - - public void setExecutionResult(ExecutionResult executionResult) { - this.executionResult = executionResult; - } - - public ResultHandler[] getResultHandlers() { - return resultHandlers; - } - - public void setResultHandlers(ResultHandler[] resultHandlers) { - this.resultHandlers = resultHandlers; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/data/ProcessedData.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/data/ProcessedData.java deleted file mode 100644 index a69b7c4b63..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/data/ProcessedData.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.data; - -import org.apache.linkis.cli.common.entity.command.CmdType; -import org.apache.linkis.cli.common.entity.var.VarAccess; - -public class ProcessedData { - String cid; - CmdType cmdType; - VarAccess stdVarAccess; - VarAccess sysVarAccess; - - public ProcessedData( - String cid, CmdType cmdType, VarAccess stdVarAccess, VarAccess sysVarAccess) { - this.cid = cid; - this.cmdType = cmdType; - this.stdVarAccess = stdVarAccess; - this.sysVarAccess = sysVarAccess; - } - - public String getCid() { - return cid; - } - - public CmdType getCmdType() { - return cmdType; - } - - public VarAccess getStdVarAccess() { - return stdVarAccess; - } - - public VarAccess getSysVarAccess() { - return sysVarAccess; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisManageJob.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisManageJob.java deleted file mode 100644 index 7dbcaca88d..0000000000 --- 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisManageJob.java +++ /dev/null @@ -1,522 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.interactor.job; - -import org.apache.linkis.cli.application.constants.AppConstants; -import org.apache.linkis.cli.application.interactor.job.data.LinkisJobData; -import org.apache.linkis.cli.application.interactor.job.data.LinkisLogData; -import org.apache.linkis.cli.application.interactor.job.data.LinkisResultData; -import org.apache.linkis.cli.application.interactor.job.desc.LinkisJobManDesc; -import org.apache.linkis.cli.application.interactor.job.subtype.LinkisManSubType; -import org.apache.linkis.cli.application.operator.ujes.LinkisJobOperator; -import org.apache.linkis.cli.application.utils.Utils; -import org.apache.linkis.cli.common.entity.job.JobData; -import org.apache.linkis.cli.common.entity.operator.JobOperator; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.constants.CommonConstants; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.job.*; -import org.apache.linkis.cli.core.utils.CommonUtils; -import org.apache.linkis.cli.core.utils.LogUtils; -import org.apache.linkis.cli.core.utils.SchedulerUtils; - -import org.apache.commons.lang3.StringUtils; - -import java.text.MessageFormat; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class LinkisManageJob extends LinkisJob - implements ManagableBackendJob, TerminatableJob, LogAccessibleJob, ResultAccessibleJob { - private static final Logger logger = LoggerFactory.getLogger(LinkisManageJob.class); - - private LinkisJobManDesc jobDesc; - private LinkisJobData data; - private TerminateToken terminateToken = new TerminateToken(); - - @Override - public LinkisJobOperator getJobOperator() { - if (!(super.getJobOperator() instanceof LinkisJobOperator)) { - throw new 
LinkisClientExecutionException( - "EXE0003", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "JobOperator of LinkisManageJob should be instance of LinkisJobOperator"); - } - return (LinkisJobOperator) super.getJobOperator(); - } - - @Override - public void setOperator(JobOperator operator) { - if (!(operator instanceof LinkisJobOperator)) { - throw new LinkisClientExecutionException( - "EXE0003", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "JobOperator of LinkisManageJob should be instance of LinkisJobOperator"); - } - super.setOperator(operator); - } - - @Override - public LinkisJobManDesc getJobDesc() { - return jobDesc; - } - - public void setJobDesc(LinkisJobManDesc jobDesc) { - this.jobDesc = jobDesc; - } - - @Override - public LinkisJobData getJobData() { - return data; - } - - public void setJobData(LinkisJobData data) { - this.data = data; - } - - @Override - public boolean isSuccess() { - return data.isSuccess(); - } - - @Override - public void doManage() throws LinkisClientRuntimeException { - LinkisManSubType subType = (LinkisManSubType) getSubType(); - if (!(subType instanceof LinkisManSubType)) { - throw new LinkisClientExecutionException( - "EXE0030", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "JobSubType is not instance of JobManSubType"); - } - switch (subType) { - case STATUS: - try { - data.updateByOperResult( - getJobOperator().queryJobInfo(jobDesc.getUser(), jobDesc.getJobID())); - if (data.getJobStatus() != null) { - data.setSuccess(true); - } - } catch (Exception e) { - data.setSuccess(false); - data.setException(e); - } - break; - // case JOB_DESC: - // result = jobManagableBackendExecutor.queryJobDesc(job); - // break; - case LOG: - try { - ((LinkisLogData) data).setIncLogMode(false); - startRetrieveLog(); - waitLogFin(); - data.setSuccess(true); - } catch (Exception e) { - data.setSuccess(false); - data.setException(e); - } - break; - case RESULT: - try { - startRetrieveResult(); - data.setSuccess(true); - } catch 
(LinkisClientExecutionException e) { - if (e.getCode().equals("EXE0037")) { - ((LinkisResultData) data).sendResultFin(); // inform listener to stop - data.setSuccess(true); - } else { - data.setSuccess(false); - data.setException(e); - } - LogUtils.getInformationLogger().warn(e.getMessage()); - } catch (Exception e) { - data.setSuccess(false); - data.setException(e); - LogUtils.getInformationLogger().warn(e.getMessage()); - } - break; - // case LIST: - // resultData = jobManExecutor.queryJobList(job); - // break; - case KILL: - doKill(); - break; - default: - throw new LinkisClientExecutionException( - "EXE0002", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "JobSubType + \"" + subType + "\" is not supported"); - } - } - - @Override - public void startRetrieveLog() { - if (jobDesc.getUser() == null || jobDesc.getJobID() == null) { - throw new LinkisClientExecutionException( - "EXE0036", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "user or jobID is null"); - } - data.updateByOperResult(getJobOperator().queryJobInfo(jobDesc.getUser(), jobDesc.getJobID())); - startRetrieveLogInternal(data); - } - - public void waitLogFin() { - if (!(data instanceof LinkisLogData)) { - return; - } - int retry = 0; - int MAX_RETRY = 300; // wait for 10 minutes after job finish - while (retry++ < MAX_RETRY) { - if (((LinkisLogData) data).logFinReceived()) { - return; - } - CommonUtils.doSleepQuietly(CommonConstants.JOB_QUERY_SLEEP_MILLS); - } - } - - public void startRetrieveLogInternal(JobData jobData) { - if (!(jobData instanceof LinkisLogData)) { - throw new LinkisClientExecutionException( - "EXE0034", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "JobData is not LinkisLogData"); - } - if (jobData.getUser() == null || jobData.getJobID() == null) { - throw new LinkisClientExecutionException( - "EXE0036", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "user or jobID is null"); - } - LinkisLogData logData = (LinkisLogData) jobData; - if (logData.getJobStatus() != null) { - try { - 
Thread logConsumer = new Thread(() -> logData.notifyLogListener(), "Log-Consumer"); - Thread logRetriever = new Thread(() -> queryLogLoop(logData), "Log-Retriever"); - SchedulerUtils.getCachedThreadPoolExecutor().execute(logRetriever); - SchedulerUtils.getCachedThreadPoolExecutor().execute(logConsumer); - } catch (Exception e) { - logger.warn("Failed to retrieve log", e); - } - } - } - - public void queryLogLoop(LinkisLogData data) { - int curLogIdx; - int nextLogIdx; - boolean hasNext = true; - int retryCnt = 0; - final int MAX_RETRY = 12; // continues fails for 90s, then exit thread - try { - while (hasNext) { - curLogIdx = data.getNextLogLineIdx() == null ? 0 : data.getNextLogLineIdx(); - try { - data.updateByOperResult(getJobOperator().queryJobInfo(data.getUser(), data.getJobID())); - queryJobLogFromLine(data, curLogIdx); - } catch (Exception e) { - logger.error("Cannot get inc-log:", e); - // and yes sometimes server may not be able to prepare persisted-log - retryCnt++; - if (retryCnt >= MAX_RETRY) { - logger.error( - "Continuously failing to query inc-log for " - + MAX_RETRY * (MAX_RETRY + 2) * 500 / 1000 - + "s. Will no longer try to query log", - e); - break; - } - Utils.doSleepQuietly(500l + 500l * retryCnt); // maybe server problem. sleep longer - continue; - } - retryCnt = 0; - nextLogIdx = data.getNextLogLineIdx() == null ? curLogIdx : data.getNextLogLineIdx(); - if (data.isIncLogMode()) { - hasNext = data.hasNextLogLine() == null ? curLogIdx < nextLogIdx : data.hasNextLogLine(); - } else { - hasNext = curLogIdx < nextLogIdx; - } - if (curLogIdx >= nextLogIdx) { - String msg = - MessageFormat.format( - "Job is still running, status={0}, progress={1}", - data.getJobStatus(), String.valueOf(data.getJobProgress() * 100) + "%"); - logger.info(msg); - } - Utils.doSleepQuietly(AppConstants.JOB_QUERY_SLEEP_MILLS); - } - } catch (Exception e) { - logger.error("Something goes wrong. 
Job Log may be incomplete", e); - } finally { - data.sendLogFin(); - } - } - - private void queryJobLogFromLine(LinkisLogData data, int fromLine) - throws LinkisClientRuntimeException { - if (!data.getJobStatus().isJobFinishedState()) { - try { - data.updateByOperResult( - getJobOperator() - .queryRunTimeLogFromLine( - data.getUser(), data.getJobID(), data.getExecID(), fromLine)); - } catch (Exception e) { - // job is finished while we start query log(but request is not send). - // then probably server cache is gone and we got a exception here. - // however we cannot know if this happens based on the exception message - logger.warn( - "Caught exception when querying runtime-log. Probably server-side has close stream. Will try openLog api if Job is completed.", - e); - if (data.getJobStatus().isJobFinishedState()) { - CommonUtils.doSleepQuietly(500l); - data.updateByOperResult( - getJobOperator() - .queryPersistedLogFromLine( - data.getUser(), data.getJobID(), data.getExecID(), fromLine)); - } - } - } else { - try { - data.updateByOperResult( - getJobOperator() - .queryPersistedLogFromLine( - data.getLogPath(), data.getUser(), data.getJobID(), fromLine)); - } catch (Exception e) { - logger.error("Cannot get persisted-inc-log:", e); - // and yes sometimes server may not be able to prepare persisted-log - throw e; - } - } - } - - @Override - public void startRetrieveResult() { - if (!(data instanceof LinkisResultData)) { - throw new LinkisClientExecutionException( - "EXE0034", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "JobData is not LinkisResultData"); - } - if (jobDesc.getUser() == null || jobDesc.getJobID() == null) { - throw new LinkisClientExecutionException( - "EXE0036", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "user or jobID is null"); - } - data.updateByOperResult(getJobOperator().queryJobInfo(jobDesc.getUser(), jobDesc.getJobID())); - if (data.getJobStatus() == null) { - throw new LinkisClientExecutionException( - "EXE0038", ErrorLevel.ERROR, 
CommonErrMsg.ExecutionErr, "jobStatus is null"); - } - LinkisResultData resultData = (LinkisResultData) data; - if (!resultData.getJobStatus().isJobSuccess() - || StringUtils.isBlank(resultData.getResultLocation())) { - resultData.updateByOperResult( - getJobOperator().queryJobInfo(resultData.getUser(), resultData.getJobID())); - } - if (!resultData.getJobStatus().isJobSuccess()) { - // throw new LinkisClientExecutionException("EXE0035", ErrorLevel.ERROR, - // CommonErrMsg.ExecutionErr, "Job status is not success but \'" + - // resultData.getJobStatus() + "\'. Will not try to retrieve any Result"); - LogUtils.getInformationLogger() - .info( - "Job status is not success but \'" - + resultData.getJobStatus() - + "\'. Will not try to retrieve any Result"); - resultData.sendResultFin(); // inform listener to stop - return; - } - if (StringUtils.isBlank(resultData.getResultLocation())) { - throw new LinkisClientExecutionException( - "EXE0037", - ErrorLevel.WARN, - CommonErrMsg.ExecutionErr, - "Got blank ResultLocation from server. Job may not have result-set. 
Will not try to retrieve any Result"); - } - - resultData.updateByOperResult( - getJobOperator() - .queryResultSetPaths( - resultData.getUser(), resultData.getJobID(), resultData.getResultLocation())); - if (resultData.getResultSetPaths() == null || resultData.getResultSetPaths().length == 0) { - String msg = "Your job got no result."; - logger.warn(msg); - resultData.sendResultFin(); // inform listener to stop - resultData.setHasResult(false); - return; - } - - try { - resultData.setHasResult(true); - Thread resultRetriever = new Thread(() -> queryResultLoop(resultData), "Result-Retriever"); - SchedulerUtils.getCachedThreadPoolExecutor().execute(resultRetriever); - } catch (Exception e) { - logger.error("Failed to retrieve result", e); - throw e; - } - } - - public void queryResultLoop(LinkisResultData data) { - boolean hasNext = true; - int retryCnt = 0; - final int MAX_RETRY = 30; // continues fails for 250s, then exit - int idx = 0; - try { - while (hasNext) { - try { - hasNext = queryOneResult(data, idx); - } catch (LinkisClientRuntimeException e) { - logger.error("Cannot get result:", e); - retryCnt++; - if (retryCnt >= MAX_RETRY) { - logger.error( - "Continuously failing to query result for " - + MAX_RETRY * (MAX_RETRY + 2) * 500 / 1000 - + "s. Will no longer try to query result", - e); - return; - } else { - hasNext = true; - } - Utils.doSleepQuietly(500l + 500l * retryCnt); // maybe server problem. sleep longer - continue; - } - idx++; - } - } catch (Exception e) { - logger.error("Something goes wrong. 
Job Result may be incomplete", e); - throw e; - } finally { - data.sendResultFin(); - } - } - - private boolean queryOneResult(LinkisResultData data, int idxResultSet) { - Integer curPage = 1; - boolean hasNextResult = true; - boolean hasNextPage = true; - while (hasNextPage) { - data.updateByOperResult( - getJobOperator() - .queryResultSetGivenResultSetPath( - data.getResultSetPaths(), - idxResultSet, - data.getUser(), - curPage, - AppConstants.RESULTSET_PAGE_SIZE)); - if (data.hasNextResultPage() == null) { - throw new LinkisClientExecutionException( - "EXE0040", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionResultErr, - "Something foes wrong. Got null as \'hasNextPage\'."); - } - hasNextPage = data.hasNextResultPage(); - - curPage++; - hasNextResult = idxResultSet + 1 < data.getResultSetPaths().length; - } - return hasNextResult; - } - - public void doKill() { - data.updateByOperResult(getJobOperator().queryJobInfo(jobDesc.getUser(), jobDesc.getJobID())); - if (data.getUser() == null || data.getJobID() == null) { - throw new LinkisClientExecutionException( - "EXE0036", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "user or jobID is null"); - } - if (data.getJobStatus() == null) { - throw new LinkisClientExecutionException( - "EXE0038", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "jobStatus is null"); - } - String msg; - if (data.getJobStatus().isJobCancelled()) { - msg = "Kill job aborted: Job has already been canceled."; - data.setSuccess(false); - data.setMessage(msg); - } else if (data.getJobStatus().isJobFinishedState()) { - msg = "Kill job aborted: Job is already in finished-state(SUCCEED/FAILED)."; - data.setSuccess(false); - data.setMessage(msg); - // throw new LinkisClientExecutionException(JobStatus.FAILED, "EXE0004", - // ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); - } else { - try { - data.updateByOperResult( - getJobOperator().queryJobInfo(jobDesc.getUser(), jobDesc.getJobID())); - data.updateByOperResult( - getJobOperator().kill(data.getUser(), 
data.getJobID(), data.getExecID())); - } catch (Exception e) { - data.setSuccess(false); - data.setMessage("Exception thrown when trying to send kill request"); - data.setException(e); - } - msg = "Kill request has been sent"; - LogUtils.getPlaintTextLogger().info(msg); - int retryCnt = 0; - final int MAX_RETRY = 5 * 6; - while (!data.getJobStatus().isJobFinishedState() && !data.getJobStatus().isJobCancelled()) { - CommonUtils.doSleepQuietly(CommonConstants.JOB_QUERY_SLEEP_MILLS); - try { - data.updateByOperResult( - getJobOperator().queryJobInfo(jobDesc.getUser(), jobDesc.getJobID())); - retryCnt = 0; // if exception then will not go here - } catch (Exception e) { - retryCnt++; - CommonUtils.doSleepQuietly(5 * CommonConstants.JOB_QUERY_SLEEP_MILLS); - if (retryCnt >= MAX_RETRY) { - data.setSuccess(false); - data.setMessage( - MessageFormat.format( - "After send kill. Client cannot get jobStatus from server continuously for {0} seconds. Client aborted. Assume kill failed! Error message: \n", - MAX_RETRY * 5 * CommonConstants.JOB_QUERY_SLEEP_MILLS)); - data.setException(e); - return; - } - } - } - if (data.getJobStatus().isJobFinishedState() && !data.getJobStatus().isJobCancelled()) { - msg = "Kill Failed: Job Current status: " + data.getJobStatus(); - data.setSuccess(false); - data.setMessage(msg); - // throw new LinkisClientExecutionException(JobStatus.FAILED, - // "EXE0004", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); - } else if (data.getJobStatus().isJobCancelled()) { - msg = - MessageFormat.format( - "Kill successful: jobId={0}, status={1}.", data.getJobID(), data.getJobStatus()); - data.setSuccess(true); - data.setMessage(msg); - // LogUtils.getPlaintTextLogger().info(msg); - } - } - return; - } - - @Override - public TerminateToken getTerminateToken() { - return terminateToken; - } - - public void setTerminateToken(TerminateToken terminateToken) { - this.terminateToken = terminateToken; - } - - @Override - public void terminate() throws 
LinkisClientRuntimeException { - return; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisOnceJob.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisOnceJob.java deleted file mode 100644 index 505b08cf26..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisOnceJob.java +++ /dev/null @@ -1,308 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.interactor.job; - -import org.apache.linkis.cli.application.constants.AppConstants; -import org.apache.linkis.cli.application.interactor.job.data.LinkisJobData; -import org.apache.linkis.cli.application.interactor.job.data.LinkisLogData; -import org.apache.linkis.cli.application.interactor.job.data.LinkisOnceJobData; -import org.apache.linkis.cli.application.interactor.job.desc.LinkisOnceDesc; -import org.apache.linkis.cli.application.utils.Utils; -import org.apache.linkis.cli.common.entity.job.JobDescription; -import org.apache.linkis.cli.common.entity.operator.JobOperator; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.constants.CommonConstants; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.job.*; -import org.apache.linkis.cli.core.utils.CommonUtils; -import org.apache.linkis.cli.core.utils.LogUtils; -import org.apache.linkis.cli.core.utils.SchedulerUtils; - -import java.text.MessageFormat; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class LinkisOnceJob extends LinkisJob - implements ManagableBackendJob, - LogAccessibleJob, - ResultAccessibleJob, - AsyncBackendJob, - TerminatableJob { - - private static final Logger logger = LoggerFactory.getLogger(LinkisOnceJob.class); - - private LinkisOnceDesc jobDesc; - private LinkisOnceJobData data; - private TerminateToken terminateToken = new TerminateToken(); - private Boolean isAsync = false; - - public void setAsync(Boolean async) { - isAsync = async; - } - - public Boolean isAsync() { - return isAsync; - } - - @Override - public JobDescription getJobDesc() { - return jobDesc; - } - - public void setJobDesc(LinkisOnceDesc desc) { - this.jobDesc = desc; - } - - @Override - 
public TerminateToken getTerminateToken() { - return terminateToken; - } - - @Override - public LinkisJobData getJobData() { - return data; - } - - public void setJobData(LinkisOnceJobData data) { - this.data = data; - } - - @Override - public JobOperator getJobOperator() { - return null; - } - - /** AsyncBackendJob */ - @Override - public void submit() throws LinkisClientRuntimeException { - StringBuilder infoBuilder = new StringBuilder(); - infoBuilder - .append("connecting to linkis gateway:") - .append(data.getOnceJobAdapter().getServerUrl()); - LogUtils.getInformationLogger().info(infoBuilder.toString()); - data.getOnceJobAdapter().submit(); - data.getOnceJobAdapter().updateStatus(); - infoBuilder.setLength(0); - infoBuilder - .append("JobId:") - .append(data.getJobID()) - .append(System.lineSeparator()) - .append("ExecId:") - .append(data.getExecID()); - LogUtils.getPlaintTextLogger().info(infoBuilder.toString()); - if (isAsync) { - data.setSuccess(data.getJobStatus() != null && data.getJobStatus().isJobSubmitted()); - } - } - - @Override - public void updateJobStatus() throws LinkisClientRuntimeException { - if (!data.getJobStatus().isJobFinishedState()) { - data.getOnceJobAdapter().updateStatus(); - String log2 = - "\n---------------------------------------------------\n" - + "\ttask " - + data.getJobID() - + " status is " - + data.getJobStatus() - + ", progress : " - + data.getJobProgress() - + "\n---------------------------------------------------"; - logger.info(log2); - } - } - - @Override - public void waitJobComplete() throws LinkisClientRuntimeException { - data.getOnceJobAdapter().waitForComplete(); - updateJobStatus(); - data.setSuccess(data.getJobStatus() != null && data.getJobStatus().isJobSuccess()); - waitIncLogComplete(data); - } - - /** TerminatableJob */ - @Override - public void terminate() throws LinkisClientRuntimeException { - terminateToken.setTerminate(); - doKill(); - } - - @Override - public void startRetrieveResult() { - // 
TODO:wait for OnceJob to support this feature - data.sendResultFin(); - } - - @Override - public void startRetrieveLog() { - data.setIncLogMode(true); - startRetrieveLogInternal(data); - } - - public void startRetrieveLogInternal(LinkisOnceJobData jobData) { - if (!(jobData instanceof LinkisLogData)) { - throw new LinkisClientExecutionException( - "EXE0034", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "JobData is not LinkisLogData"); - } - if (jobData.getUser() == null || jobData.getJobID() == null) { - throw new LinkisClientExecutionException( - "EXE0036", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "user or jobID is null"); - } - LinkisOnceJobData logData = jobData; - if (logData.getJobStatus() != null) { - try { - Thread logConsumer = new Thread(() -> logData.notifyLogListener(), "Log-Consumer"); - Thread logRetriever = new Thread(() -> queryLogLoop(logData), "Log-Retriever"); - SchedulerUtils.getCachedThreadPoolExecutor().execute(logRetriever); - SchedulerUtils.getCachedThreadPoolExecutor().execute(logConsumer); - } catch (Exception e) { - logger.warn("Failed to retrieve log", e); - } - } - } - - private void queryJobLogOneIteration(LinkisOnceJobData data) throws LinkisClientRuntimeException { - try { - data.getOnceJobAdapter().queryJobLogOneIteration(); - // - // data.updateByOperResult(getJobOperator().queryRunTimeLogFromLine(data.getUser(), - // data.getJobID(), data.getExecID(), fromLine)); - } catch (Exception e) { - // job is finished while we start query log(but request is not send). - // then probably server cache is gone and we got a exception here. - // however we cannot know if this happens based on the exception message - logger.warn( - "Caught exception when querying runtime-log. Probably server-side has close stream. 
Will try openLog api if Job is completed.", - e); - if (data.getJobStatus().isJobFinishedState()) { - CommonUtils.doSleepQuietly(500l); - // - // data.updateByOperResult(getJobOperator().queryPersistedLogFromLine(data.getUser(), - // data.getJobID(), data.getExecID(), fromLine)); - } - } - } - - public void queryLogLoop(LinkisOnceJobData data) { - boolean hasNext = true; - int retryCnt = 0; - final int MAX_RETRY = 12; // continues fails for 90s, then exit thread - try { - while (hasNext) { - try { - queryJobLogOneIteration(data); - } catch (Exception e) { - logger.error("Cannot get inc-log:", e); - // and yes sometimes server may not be able to prepare persisted-log - retryCnt++; - if (retryCnt >= MAX_RETRY) { - logger.error( - "Continuously failing to query inc-log for " - + MAX_RETRY * (MAX_RETRY + 2) * 500 / 1000 - + "s. Will no longer try to query log", - e); - break; - } - Utils.doSleepQuietly(500l + 500l * retryCnt); // maybe server problem. sleep longer - continue; - } - retryCnt = 0; - if (data.isIncLogMode()) { - hasNext = - data.hasNextLogLine() == null - ? !data.getJobStatus().isJobFinishedState() - : data.hasNextLogLine(); - } else { - hasNext = false; - } - if (hasNext) { - String msg = - MessageFormat.format( - "Job is still running, status={0}, progress={1}", - data.getJobStatus(), String.valueOf(data.getJobProgress() * 100) + "%"); - logger.info(msg); - } - Utils.doSleepQuietly(AppConstants.JOB_QUERY_SLEEP_MILLS); - } - } catch (Exception e) { - logger.error("Something goes wrong. 
Job Log may be incomplete", e); - } finally { - data.sendLogFin(); - } - } - - private void waitIncLogComplete(LinkisJobData data) { - if (!(data instanceof LinkisOnceJobData)) { - return; - } - int retry = 0; - int MAX_RETRY = 300; // wait for 10 minutes after job finish - while (retry++ < MAX_RETRY) { - if (((LinkisOnceJobData) data).logFinReceived()) { - return; - } - CommonUtils.doSleepQuietly(CommonConstants.JOB_QUERY_SLEEP_MILLS); - } - String msg = - "Job is in Finished state(SUCCEED/FAILED/CANCELED) but client keep querying inclog for " - + (MAX_RETRY * CommonConstants.JOB_QUERY_SLEEP_MILLS / 1000) - + "seconds. Execution ends forcefully. Next will try handle execution result."; - logger.warn(msg); - LogUtils.getInformationLogger().warn(msg); - } - - // /** - // * LogAccessibleJob - // */ - // @Override - // public void startRetrieveLog() { - - // } - - /** ManagableBackendJob */ - @Override - public void doManage() throws LinkisClientRuntimeException {} - - @Override - public boolean isSuccess() { - return data.isSuccess(); - } - - private void doKill() { - String msg; - if (data.getJobStatus().isJobCancelled()) { - msg = "Kill job aborted: Job is failed or has already been canceled."; - data.setSuccess(false); - data.setMessage(msg); - } else if (data.getJobStatus().isJobFinishedState()) { - msg = "Kill job aborted: Job is already in finished-state(SUCCEED/FAILED)."; - data.setSuccess(false); - data.setMessage(msg); - // throw new LinkisClientExecutionException(JobStatus.FAILED, "EXE0004", - // ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); - } else { - data.getOnceJobAdapter().kill(); - updateJobStatus(); - data.setSuccess(true); - data.setMessage("successfully killed job"); - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisSubmitJob.java 
b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisSubmitJob.java deleted file mode 100644 index d1e71e4e8d..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisSubmitJob.java +++ /dev/null @@ -1,282 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.interactor.job; - -import org.apache.linkis.cli.application.interactor.job.data.LinkisJobData; -import org.apache.linkis.cli.application.interactor.job.data.LinkisLogData; -import org.apache.linkis.cli.application.interactor.job.data.LinkisResultData; -import org.apache.linkis.cli.application.interactor.job.desc.LinkisJobManDesc; -import org.apache.linkis.cli.application.interactor.job.desc.LinkisSubmitDesc; -import org.apache.linkis.cli.application.operator.ujes.LinkisJobOperator; -import org.apache.linkis.cli.common.entity.operator.JobOperator; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.constants.CommonConstants; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.job.*; -import org.apache.linkis.cli.core.utils.CommonUtils; -import org.apache.linkis.cli.core.utils.LogUtils; - -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.exception.ExceptionUtils; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class LinkisSubmitJob extends LinkisJob - implements AsyncBackendJob, LogAccessibleJob, ResultAccessibleJob, TerminatableJob { - private static final Logger logger = LoggerFactory.getLogger(LinkisSubmitJob.class); - - private LinkisSubmitDesc jobDesc; - private LinkisJobData data; - private TerminateToken terminateToken = new TerminateToken(); - private LinkisManageJob manageJob = new LinkisManageJob(); - private Boolean isAsync = false; - - public void setAsync(Boolean async) { - isAsync = async; - } - - public Boolean isAsync() { - return isAsync; - } - - @Override - public LinkisJobOperator getJobOperator() { - if (!(super.getJobOperator() instanceof LinkisJobOperator)) { - throw new 
LinkisClientExecutionException( - "EXE0003", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "JobOperator of LinkisManageJob should be instance of LinkisJobOperator"); - } - return (LinkisJobOperator) super.getJobOperator(); - } - - @Override - public void setOperator(JobOperator operator) { - if (!(operator instanceof LinkisJobOperator)) { - throw new LinkisClientExecutionException( - "EXE0003", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "JobOperator of LinkisManageJob should be instance of LinkisJobOperator"); - } - manageJob.setOperator(operator); - super.setOperator(operator); - } - - @Override - public LinkisSubmitDesc getJobDesc() { - return jobDesc; - } - - public void setJobDesc(LinkisSubmitDesc jobDesc) { - this.jobDesc = jobDesc; - } - - @Override - public LinkisJobData getJobData() { - return data; - } - - public void setJobData(LinkisJobData data) { - manageJob.setJobData(data); - this.data = data; - } - - @Override - public TerminateToken getTerminateToken() { - return terminateToken; - } - - public void setTerminateToken(TerminateToken terminateToken) { - this.terminateToken = terminateToken; - } - - @Override - public void submit() throws LinkisClientRuntimeException { - StringBuilder infoBuilder = new StringBuilder(); - infoBuilder.append("connecting to linkis gateway:").append(getJobOperator().getServerUrl()); - LogUtils.getInformationLogger().info(infoBuilder.toString()); - data.updateByOperResult(getJobOperator().submit(jobDesc)); - CommonUtils.doSleepQuietly(2000l); - LinkisJobManDesc jobManDesc = new LinkisJobManDesc(); - jobManDesc.setJobId(data.getJobID()); - jobManDesc.setUser(data.getUser()); - manageJob.setJobDesc(jobManDesc); - data.updateByOperResult(getJobOperator().queryJobInfo(data.getUser(), data.getJobID())); - infoBuilder.setLength(0); - infoBuilder - .append("JobId:") - .append(data.getJobID()) - .append(System.lineSeparator()) - .append("TaskId:") - .append(data.getJobID()) - .append(System.lineSeparator()) - 
.append("ExecId:") - .append(data.getExecID()); - LogUtils.getPlaintTextLogger().info(infoBuilder.toString()); - if (isAsync) { - data.setSuccess(data.getJobStatus() != null && data.getJobStatus().isJobSubmitted()); - } - } - - @Override - public void updateJobStatus() throws LinkisClientRuntimeException { - data.updateByOperResult(getJobOperator().queryJobInfo(data.getUser(), data.getJobID())); - getJobOperator().queryJobStatus(data.getUser(), data.getJobID(), data.getExecID()); - String log2 = - "\n---------------------------------------------------\n" - + "\ttask " - + data.getJobID() - + " status is " - + data.getJobStatus() - + ", progress : " - + data.getJobProgress() - + "\n---------------------------------------------------"; - logger.info(log2); - } - - @Override - public void waitJobComplete() throws LinkisClientRuntimeException { - int retryCnt = 0; - final int MAX_RETRY = 30; - while (!data.getJobStatus().isJobFinishedState()) { - // query progress - try { - data.updateByOperResult(getJobOperator().queryJobInfo(data.getUser(), data.getJobID())); - getJobOperator().queryJobStatus(data.getUser(), data.getJobID(), data.getExecID()); - } catch (Exception e) { - logger.warn("", e); - retryCnt++; - if (retryCnt >= MAX_RETRY) { - throw new LinkisClientExecutionException( - "EXE0013", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "Cannot get jobStatus from server continuously for {0} seconds. Client aborted! Error message: \n", - MAX_RETRY * 5 * CommonConstants.JOB_QUERY_SLEEP_MILLS, - e); - } - CommonUtils.doSleepQuietly( - 5 * CommonConstants.JOB_QUERY_SLEEP_MILLS); // maybe server problem. 
sleep - // longer - continue; - } - retryCnt = 0; // reset counter - checkJobAvailability(data); - CommonUtils.doSleepQuietly(CommonConstants.JOB_QUERY_SLEEP_MILLS); - } - data.setSuccess(data.getJobStatus() != null && data.getJobStatus().isJobSuccess()); - waitIncLogComplete(data); - } - - private void waitIncLogComplete(LinkisJobData data) { - if (!(data instanceof LinkisLogData)) { - return; - } - int retry = 0; - int MAX_RETRY = 300; // wait for 10 minutes after job finish - while (retry++ < MAX_RETRY) { - if (((LinkisLogData) data).logFinReceived()) { - return; - } - CommonUtils.doSleepQuietly(CommonConstants.JOB_QUERY_SLEEP_MILLS); - } - String msg = - "Job is in Finished state(SUCCEED/FAILED/CANCELED) but client keep querying inclog for " - + (MAX_RETRY * CommonConstants.JOB_QUERY_SLEEP_MILLS / 1000) - + "seconds. Execution ends forcefully. Next will try handle execution result."; - logger.warn(msg); - LogUtils.getInformationLogger().warn(msg); - } - - @Override - public void startRetrieveResult() { - try { - manageJob.startRetrieveResult(); - data.setSuccess(true); - } catch (LinkisClientExecutionException e) { - if (e.getCode().equals("EXE0037")) { - data.setSuccess(true); - LogUtils.getInformationLogger().warn(e.getMessage()); - } else { - data.setSuccess(false); - data.setException(e); - } - ((LinkisResultData) data).sendResultFin(); // inform listener to stop - } catch (Exception e) { - data.setSuccess(false); - data.setException(e); - ((LinkisResultData) data).sendResultFin(); // inform listener to stop - } - } - - @Override - public void startRetrieveLog() { - if (!(data instanceof LinkisLogData)) { - throw new LinkisClientExecutionException( - "EXE0034", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "JobData is not LinkisLogData"); - } - LinkisLogData dataCopy; - try { - dataCopy = ((LinkisLogData) data).clone(); // make a copy to avoid race condition - } catch (CloneNotSupportedException e) { - throw new LinkisClientExecutionException( - "EXE0035", 
ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "logData is not Cloneable", e); - } - dataCopy.setIncLogMode(true); - manageJob.startRetrieveLogInternal(dataCopy); - } - - @Override - public void terminate() throws LinkisClientRuntimeException { - terminateToken.setTerminate(); - // kill job if job is submitted - if (StringUtils.isNotBlank(data.getJobID())) { - System.out.println("\nKilling job: " + data.getJobID()); - try { - manageJob.doKill(); - if (data.getJobStatus().isJobCancelled()) { - System.out.println("Successfully killed job: " + data.getJobID() + " on exit"); - } else { - System.out.println( - "Failed to kill job: " - + data.getJobID() - + " on exit. Current job status: " - + data.getJobStatus()); - } - } catch (Exception e) { - System.out.println("Failed to kill job: " + data.getJobID() + " on exit"); - System.out.println(ExceptionUtils.getStackTrace(e)); - } - } - } - - private void checkJobAvailability(LinkisJobData data) throws LinkisClientRuntimeException { - if (data.getJobStatus().isJobAbnormalStatus()) { - throw new LinkisClientExecutionException( - "EXE0006", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "Job is in abnormal status: " + CommonUtils.GSON.toJson(data)); - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/builder/LinkisManageJobBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/builder/LinkisManageJobBuilder.java deleted file mode 100644 index ef5648b993..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/builder/LinkisManageJobBuilder.java +++ /dev/null @@ -1,140 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.job.builder; - -import org.apache.linkis.cli.application.constants.AppKeys; -import org.apache.linkis.cli.application.interactor.job.LinkisManageJob; -import org.apache.linkis.cli.application.interactor.job.data.LinkisJobData; -import org.apache.linkis.cli.application.interactor.job.data.LinkisJobDataImpl; -import org.apache.linkis.cli.application.interactor.job.desc.LinkisJobManDesc; -import org.apache.linkis.cli.application.observer.listener.LinkisClientListener; -import org.apache.linkis.cli.application.operator.ujes.LinkisJobOperator; -import org.apache.linkis.cli.application.utils.ExecutionUtils; -import org.apache.linkis.cli.common.entity.present.PresentWay; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.job.JobBuilder; -import org.apache.linkis.cli.core.operator.JobOperatorFactory; -import org.apache.linkis.cli.core.present.PresentModeImpl; -import org.apache.linkis.cli.core.present.PresentWayImpl; - -import org.apache.commons.lang3.StringUtils; - -import java.util.HashSet; -import java.util.Set; - -import 
org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class LinkisManageJobBuilder extends JobBuilder { - private static Logger logger = LoggerFactory.getLogger(LinkisSubmitJobBuilder.class); - - LinkisClientListener logListener; - - public LinkisManageJobBuilder setLogListener(LinkisClientListener observer) { - this.logListener = observer; - return this; - } - - @Override - protected LinkisJobManDesc buildJobDesc() { - LinkisJobManDesc desc = new LinkisJobManDesc(); - String osUser = sysVarAccess.getVar(String.class, AppKeys.LINUX_USER_KEY); - String[] adminUsers = StringUtils.split(AppKeys.ADMIN_USERS, ','); - Set adminSet = new HashSet<>(); - for (String admin : adminUsers) { - adminSet.add(admin); - } - String submitUsr = ExecutionUtils.getSubmitUser(stdVarAccess, osUser, adminSet); - - String jobId = null; - if (stdVarAccess.hasVar(AppKeys.LINKIS_CLIENT_KILL_OPT)) { - jobId = stdVarAccess.getVar(String.class, AppKeys.LINKIS_CLIENT_KILL_OPT); - } else if (stdVarAccess.hasVar(AppKeys.LINKIS_CLIENT_STATUS_OPT)) { - jobId = stdVarAccess.getVar(String.class, AppKeys.LINKIS_CLIENT_STATUS_OPT); - } else if (stdVarAccess.hasVar(AppKeys.LINKIS_CLIENT_DESC_OPT)) { - jobId = stdVarAccess.getVar(String.class, AppKeys.LINKIS_CLIENT_DESC_OPT); - } else if (stdVarAccess.hasVar(AppKeys.LINKIS_CLIENT_LOG_OPT)) { - jobId = stdVarAccess.getVar(String.class, AppKeys.LINKIS_CLIENT_LOG_OPT); - } else if (stdVarAccess.hasVar(AppKeys.LINKIS_CLIENT_RESULT_OPT)) { - jobId = stdVarAccess.getVar(String.class, AppKeys.LINKIS_CLIENT_RESULT_OPT); - } else if (stdVarAccess.hasVar(AppKeys.LINKIS_CLIENT_LIST_OPT)) { - jobId = stdVarAccess.getVar(String.class, AppKeys.LINKIS_CLIENT_LIST_OPT); - } - - desc.setJobId(jobId); - desc.setUser(submitUsr); - return desc; - } - - @Override - protected LinkisJobData buildJobData() { - LinkisJobDataImpl data = new LinkisJobDataImpl(); - if (logListener == null) { - logger.warn("logListener is not registered, will not be able to display log"); - } 
else { - data.registerincLogListener(logListener); - } - return data; - } - - @Override - protected LinkisJobOperator buildJobOperator() { - LinkisJobOperator oper; - try { - oper = (LinkisJobOperator) JobOperatorFactory.getReusable(AppKeys.REUSABLE_UJES_CLIENT); - } catch (Exception e) { - throw new LinkisClientRuntimeException( - "BLD0012", - ErrorLevel.ERROR, - CommonErrMsg.BuilderBuildErr, - "Failed to get a valid operator.", - e); - } - return oper; - } - - @Override - protected PresentWay buildPresentWay() { - PresentWayImpl presentWay = new PresentWayImpl(); - String outputPath = stdVarAccess.getVar(String.class, AppKeys.LINKIS_CLIENT_COMMON_OUTPUT_PATH); - - presentWay.setPath(outputPath); - presentWay.setMode(PresentModeImpl.STDOUT); - if (StringUtils.isNotBlank(outputPath)) { - presentWay.setMode(PresentModeImpl.TEXT_FILE); - } - - return presentWay; - } - - @Override - protected LinkisManageJob getTargetNewInstance() { - return new LinkisManageJob(); - } - - @Override - public LinkisManageJob build() { - ((LinkisManageJob) targetObj).setJobDesc(buildJobDesc()); - ((LinkisManageJob) targetObj).setJobData(buildJobData()); - targetObj.setOperator(buildJobOperator()); - targetObj.setPresentWay(buildPresentWay()); - return (LinkisManageJob) super.build(); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/builder/LinkisOnceJobBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/builder/LinkisOnceJobBuilder.java deleted file mode 100644 index 15a2eb3ac3..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/builder/LinkisOnceJobBuilder.java +++ /dev/null @@ -1,239 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * 
contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.job.builder; - -import org.apache.linkis.cli.application.constants.AppConstants; -import org.apache.linkis.cli.application.constants.AppKeys; -import org.apache.linkis.cli.application.constants.LinkisKeys; -import org.apache.linkis.cli.application.interactor.job.LinkisOnceJob; -import org.apache.linkis.cli.application.interactor.job.data.LinkisOnceJobData; -import org.apache.linkis.cli.application.interactor.job.data.SimpleOnceJobAdapter; -import org.apache.linkis.cli.application.interactor.job.desc.LinkisOnceDesc; -import org.apache.linkis.cli.application.observer.listener.LinkisClientListener; -import org.apache.linkis.cli.application.utils.ExecutionUtils; -import org.apache.linkis.cli.common.entity.operator.JobOperator; -import org.apache.linkis.cli.common.entity.present.PresentWay; -import org.apache.linkis.cli.core.interactor.job.JobBuilder; -import org.apache.linkis.cli.core.present.PresentModeImpl; -import org.apache.linkis.cli.core.present.PresentWayImpl; - -import org.apache.commons.lang3.StringUtils; - -import java.util.*; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class LinkisOnceJobBuilder extends JobBuilder { - private static Logger logger = 
LoggerFactory.getLogger(LinkisSubmitJobBuilder.class); - - private LinkisClientListener logListener; - private Boolean isAsync = false; - private SimpleOnceJobAdapter onceJobAdapter = new SimpleOnceJobAdapter(); - - public LinkisOnceJobBuilder setLogListener(LinkisClientListener observer) { - this.logListener = observer; - return this; - } - - public LinkisOnceJobBuilder setAsync(Boolean async) { - isAsync = async; - return this; - } - - @Override - protected LinkisOnceJob getTargetNewInstance() { - return new LinkisOnceJob(); - } - - @Override - protected LinkisOnceDesc buildJobDesc() { - LinkisOnceDesc desc = new LinkisOnceDesc(); - - desc.setStdVarAccess(stdVarAccess); - desc.setSysVarAccess(sysVarAccess); - - Map confMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_PARAM_CONF); - Map runtimeMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_PARAM_RUNTIME); - Map varMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_PARAM_VAR); - Map labelMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_LABEL); - Map sourceMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_SOURCE); - Map executionMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_EXEC); - Map jobContentMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_CONTENT); - - confMap = confMap == null ? new HashMap<>() : confMap; - runtimeMap = runtimeMap == null ? new HashMap<>() : runtimeMap; - varMap = varMap == null ? new HashMap<>() : varMap; - labelMap = labelMap == null ? new HashMap<>() : labelMap; - sourceMap = sourceMap == null ? new HashMap<>() : sourceMap; - executionMap = executionMap == null ? new HashMap<>() : executionMap; - jobContentMap = jobContentMap == null ? 
new HashMap<>() : jobContentMap; - - confMap = ProcessKeyUtils.removePrefixForKeysInMap(confMap); - runtimeMap = ProcessKeyUtils.removePrefixForKeysInMap(runtimeMap); - labelMap = ProcessKeyUtils.removePrefixForKeysInMap(labelMap); - sourceMap = ProcessKeyUtils.removePrefixForKeysInMap(sourceMap); - executionMap = ProcessKeyUtils.removePrefixForKeysInMap(executionMap); - jobContentMap = ProcessKeyUtils.removePrefixForKeysInMap(jobContentMap); - - for (String key : stdVarAccess.getAllVarKeys()) { - Object val = stdVarAccess.getVar(Object.class, key); - if (!(val instanceof Map) && val != null) { - // note that we allow it to overwrite existing values in map - if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_PARAM_CONF)) { - ProcessKeyUtils.removePrefixAndPutValToMap(confMap, key, val, AppKeys.JOB_PARAM_CONF); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_PARAM_VAR)) { - ProcessKeyUtils.removePrefixAndPutValToMap(varMap, key, val, AppKeys.JOB_PARAM_VAR); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_PARAM_RUNTIME)) { - ProcessKeyUtils.removePrefixAndPutValToMap( - runtimeMap, key, val, AppKeys.JOB_PARAM_RUNTIME); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_EXEC)) { - ProcessKeyUtils.removePrefixAndPutValToMap(executionMap, key, val, AppKeys.JOB_EXEC); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_LABEL)) { - ProcessKeyUtils.removePrefixAndPutValToMap(labelMap, key, val, AppKeys.JOB_LABEL); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_SOURCE)) { - ProcessKeyUtils.removePrefixAndPutValToMap(sourceMap, key, val, AppKeys.JOB_SOURCE); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_CONTENT)) { - ProcessKeyUtils.removePrefixAndPutValToMap(jobContentMap, key, val, AppKeys.JOB_CONTENT); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.LINKIS_CLIENT_COMMON)) { - // do nothing - } else { - // confMap.put(key, stdVarAccess.getVar(Object.class, key)); - } - } - } 
- - String creator; - if (!isAsync) { - creator = - stdVarAccess.getVarOrDefault( - String.class, AppKeys.JOB_COMMON_CREATOR, AppConstants.JOB_CREATOR_DEFAULT); - } else { - creator = - stdVarAccess.getVarOrDefault( - String.class, AppKeys.JOB_COMMON_CREATOR, AppConstants.JOB_CREATOR_ASYNC_DEFAULT); - } - String code = stdVarAccess.getVar(String.class, AppKeys.JOB_EXEC_CODE); - String engineType = stdVarAccess.getVar(String.class, AppKeys.JOB_LABEL_ENGINE_TYPE); - String runType = stdVarAccess.getVar(String.class, AppKeys.JOB_LABEL_CODE_TYPE); - String scriptPath = - stdVarAccess.getVarOrDefault(String.class, AppKeys.JOB_SOURCE_SCRIPT_PATH, "LinkisCli"); - - String osUser = sysVarAccess.getVar(String.class, AppKeys.LINUX_USER_KEY); - String[] adminUsers = StringUtils.split(AppKeys.ADMIN_USERS, ','); - Set adminSet = new HashSet<>(); - for (String admin : adminUsers) { - adminSet.add(admin); - } - String submitUsr = ExecutionUtils.getSubmitUser(stdVarAccess, osUser, adminSet); - String proxyUsr = ExecutionUtils.getProxyUser(stdVarAccess, submitUsr, adminSet); - - String enableExecuteOnce = - stdVarAccess.getVarOrDefault(String.class, AppKeys.JOB_LABEL_EXECUTEONCE, "true"); - // default executeOnce-mode - if (Boolean.parseBoolean(enableExecuteOnce)) { - labelMap.put(LinkisKeys.KEY_EXECUTEONCE, ""); - } else { - labelMap.remove(LinkisKeys.KEY_EXECUTEONCE); - } - String codePath = stdVarAccess.getVar(String.class, AppKeys.JOB_COMMON_CODE_PATH); - Object extraArgsObj = stdVarAccess.getVar(Object.class, AppKeys.JOB_EXTRA_ARGUMENTS); - if (extraArgsObj != null - && extraArgsObj instanceof String[] - && StringUtils.isBlank(code) - && StringUtils.isBlank(codePath)) { - String[] extraArgs = (String[]) extraArgsObj; - codePath = extraArgs[0]; - if (extraArgs.length > 1) { - runtimeMap.put( - LinkisKeys.EXTRA_ARGUMENTS, Arrays.copyOfRange(extraArgs, 1, extraArgs.length)); - } - } - - if (StringUtils.isBlank(code) && StringUtils.isNotBlank(codePath)) { - code = 
ExecutionUtils.readFile(codePath); - } - - executionMap.put(LinkisKeys.KEY_CODE, code); - labelMap.put(LinkisKeys.KEY_ENGINETYPE, engineType); - labelMap.put(LinkisKeys.KEY_CODETYPE, runType); - labelMap.put(LinkisKeys.KEY_USER_CREATOR, proxyUsr + "-" + creator); - sourceMap.put(LinkisKeys.KEY_SCRIPT_PATH, scriptPath); - runtimeMap.put(LinkisKeys.KEY_HIVE_RESULT_DISPLAY_TBALE, true); - - desc.setCreator(creator); - desc.setParamConfMap(confMap); - desc.setParamRunTimeMap(runtimeMap); - desc.setParamVarsMap(varMap); - desc.setLabelMap(labelMap); - desc.setSourceMap(sourceMap); - desc.setExecutionMap(executionMap); - desc.setSubmitUser(submitUsr); - desc.setProxyUser(proxyUsr); - desc.setJobContentMap(jobContentMap); - - return desc; - } - - @Override - protected LinkisOnceJobData buildJobData() { - LinkisOnceJobData data = new LinkisOnceJobData(); - data.setOnceJobAdapter(this.onceJobAdapter); - if (logListener == null) { - logger.warn("logListener is not registered, will not be able to display log"); - } else { - data.registerincLogListener(logListener); - } - return data; - } - - @Override - protected JobOperator buildJobOperator() { - // OnceJob is Stateful, should not have an operator - return null; - } - - @Override - protected PresentWay buildPresentWay() { - PresentWayImpl presentWay = new PresentWayImpl(); - String outputPath = stdVarAccess.getVar(String.class, AppKeys.LINKIS_CLIENT_COMMON_OUTPUT_PATH); - - presentWay.setPath(outputPath); - presentWay.setMode(PresentModeImpl.STDOUT); - presentWay.setDisplayMetaAndLogo( - stdVarAccess.getVarOrDefault(Boolean.class, AppKeys.LINKIS_COMMON_DIAPLAY_META_LOGO, true)); - if (StringUtils.isNotBlank(outputPath)) { - presentWay.setMode(PresentModeImpl.TEXT_FILE); - } - - return presentWay; - } - - @Override - public LinkisOnceJob build() { - LinkisOnceDesc desc = buildJobDesc(); - ((LinkisOnceJob) targetObj).setJobDesc(desc); - LinkisOnceJobData data = buildJobData(); - ((LinkisOnceJob) targetObj).setJobData(data); - 
data.getOnceJobAdapter().init(desc); - ((LinkisOnceJob) targetObj).setAsync(isAsync); - targetObj.setOperator(buildJobOperator()); - targetObj.setPresentWay(buildPresentWay()); - return (LinkisOnceJob) super.build(); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/builder/LinkisSubmitJobBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/builder/LinkisSubmitJobBuilder.java deleted file mode 100644 index 6804f09056..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/builder/LinkisSubmitJobBuilder.java +++ /dev/null @@ -1,241 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.interactor.job.builder; - -import org.apache.linkis.cli.application.constants.AppConstants; -import org.apache.linkis.cli.application.constants.AppKeys; -import org.apache.linkis.cli.application.constants.LinkisKeys; -import org.apache.linkis.cli.application.interactor.job.LinkisSubmitJob; -import org.apache.linkis.cli.application.interactor.job.data.LinkisJobData; -import org.apache.linkis.cli.application.interactor.job.data.LinkisJobDataImpl; -import org.apache.linkis.cli.application.interactor.job.desc.LinkisSubmitDesc; -import org.apache.linkis.cli.application.observer.listener.LinkisClientListener; -import org.apache.linkis.cli.application.operator.ujes.LinkisJobOperator; -import org.apache.linkis.cli.application.utils.ExecutionUtils; -import org.apache.linkis.cli.common.entity.present.PresentWay; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.job.JobBuilder; -import org.apache.linkis.cli.core.operator.JobOperatorFactory; -import org.apache.linkis.cli.core.present.PresentModeImpl; -import org.apache.linkis.cli.core.present.PresentWayImpl; - -import org.apache.commons.lang3.StringUtils; - -import java.util.*; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class LinkisSubmitJobBuilder extends JobBuilder { - private static Logger logger = LoggerFactory.getLogger(LinkisSubmitJobBuilder.class); - - private LinkisClientListener logListener; - private Boolean isAsync = false; - - public LinkisSubmitJobBuilder setLogListener(LinkisClientListener observer) { - this.logListener = observer; - return this; - } - - public LinkisSubmitJobBuilder setAsync(Boolean async) { - isAsync = async; - return this; - } - - @Override - protected LinkisSubmitJob getTargetNewInstance() { - return new 
LinkisSubmitJob(); - } - - @Override - protected LinkisSubmitDesc buildJobDesc() { - LinkisSubmitDesc desc = new LinkisSubmitDesc(); - - Map confMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_PARAM_CONF); - Map runtimeMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_PARAM_RUNTIME); - Map varMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_PARAM_VAR); - Map labelMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_LABEL); - Map sourceMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_SOURCE); - Map executionMap = stdVarAccess.getVar(Map.class, AppKeys.JOB_EXEC); - - confMap = confMap == null ? new HashMap<>() : confMap; - runtimeMap = runtimeMap == null ? new HashMap<>() : runtimeMap; - varMap = varMap == null ? new HashMap<>() : varMap; - labelMap = labelMap == null ? new HashMap<>() : labelMap; - sourceMap = sourceMap == null ? new HashMap<>() : sourceMap; - executionMap = executionMap == null ? new HashMap<>() : executionMap; - - /** remove key prefix of all keys in map type params. e.g. kv in confMap, labelMap etc. 
*/ - confMap = ProcessKeyUtils.removePrefixForKeysInMap(confMap); - runtimeMap = ProcessKeyUtils.removePrefixForKeysInMap(runtimeMap); - labelMap = ProcessKeyUtils.removePrefixForKeysInMap(labelMap); - sourceMap = ProcessKeyUtils.removePrefixForKeysInMap(sourceMap); - executionMap = ProcessKeyUtils.removePrefixForKeysInMap(executionMap); - - /** remove key prefix of non-map type params */ - for (String key : stdVarAccess.getAllVarKeys()) { - Object val = stdVarAccess.getVar(Object.class, key); - if (!(val instanceof Map) && val != null) { - // note that we allow it to overwrite existing values in map - if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_PARAM_CONF)) { - ProcessKeyUtils.removePrefixAndPutValToMap(confMap, key, val, AppKeys.JOB_PARAM_CONF); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_PARAM_VAR)) { - ProcessKeyUtils.removePrefixAndPutValToMap(varMap, key, val, AppKeys.JOB_PARAM_VAR); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_PARAM_RUNTIME)) { - ProcessKeyUtils.removePrefixAndPutValToMap( - runtimeMap, key, val, AppKeys.JOB_PARAM_RUNTIME); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_EXEC)) { - ProcessKeyUtils.removePrefixAndPutValToMap(executionMap, key, val, AppKeys.JOB_EXEC); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_LABEL)) { - ProcessKeyUtils.removePrefixAndPutValToMap(labelMap, key, val, AppKeys.JOB_LABEL); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.JOB_SOURCE)) { - ProcessKeyUtils.removePrefixAndPutValToMap(sourceMap, key, val, AppKeys.JOB_SOURCE); - } else if (StringUtils.startsWithIgnoreCase(key, AppKeys.LINKIS_CLIENT_COMMON)) { - // do nothing - } else { - // confMap.put(key, stdVarAccess.getVar(Object.class, key)); - } - } - } - - String creator; - if (!isAsync) { - creator = - stdVarAccess.getVarOrDefault( - String.class, AppKeys.JOB_COMMON_CREATOR, AppConstants.JOB_CREATOR_DEFAULT); - } else { - creator = - stdVarAccess.getVarOrDefault( - 
String.class, AppKeys.JOB_COMMON_CREATOR, AppConstants.JOB_CREATOR_ASYNC_DEFAULT); - } - String code = stdVarAccess.getVar(String.class, AppKeys.JOB_EXEC_CODE); - String engineType = stdVarAccess.getVar(String.class, AppKeys.JOB_LABEL_ENGINE_TYPE); - String runType = stdVarAccess.getVar(String.class, AppKeys.JOB_LABEL_CODE_TYPE); - String scriptPath = - stdVarAccess.getVarOrDefault(String.class, AppKeys.JOB_SOURCE_SCRIPT_PATH, "LinkisCli"); - - String osUser = sysVarAccess.getVar(String.class, AppKeys.LINUX_USER_KEY); - String[] adminUsers = StringUtils.split(AppKeys.ADMIN_USERS, ','); - Set adminSet = new HashSet<>(); - for (String admin : adminUsers) { - adminSet.add(admin); - } - String submitUsr = ExecutionUtils.getSubmitUser(stdVarAccess, osUser, adminSet); - String proxyUsr = ExecutionUtils.getProxyUser(stdVarAccess, submitUsr, adminSet); - - String enableExecuteOnce = - stdVarAccess.getVarOrDefault(String.class, AppKeys.JOB_LABEL_EXECUTEONCE, "true"); - // default executeOnce-mode - if (Boolean.parseBoolean(enableExecuteOnce)) { - labelMap.put(LinkisKeys.KEY_EXECUTEONCE, ""); - } else { - labelMap.remove(LinkisKeys.KEY_EXECUTEONCE); - } - String codePath = stdVarAccess.getVar(String.class, AppKeys.JOB_COMMON_CODE_PATH); - Object extraArgsObj = stdVarAccess.getVar(Object.class, AppKeys.JOB_EXTRA_ARGUMENTS); - if (extraArgsObj != null - && extraArgsObj instanceof String[] - && StringUtils.isBlank(code) - && StringUtils.isBlank(codePath)) { - String[] extraArgs = (String[]) extraArgsObj; - codePath = extraArgs[0]; - if (extraArgs.length > 1) { - runtimeMap.put( - LinkisKeys.EXTRA_ARGUMENTS, Arrays.copyOfRange(extraArgs, 1, extraArgs.length)); - } - } - - if (StringUtils.isBlank(code) && StringUtils.isNotBlank(codePath)) { - code = ExecutionUtils.readFile(codePath); - } - - executionMap.put(LinkisKeys.KEY_CODE, code); - labelMap.put(LinkisKeys.KEY_ENGINETYPE, engineType); - labelMap.put(LinkisKeys.KEY_CODETYPE, runType); - 
labelMap.put(LinkisKeys.KEY_USER_CREATOR, proxyUsr + "-" + creator); - sourceMap.put(LinkisKeys.KEY_SCRIPT_PATH, scriptPath); - runtimeMap.put(LinkisKeys.KEY_HIVE_RESULT_DISPLAY_TBALE, true); - - desc.setCreator(creator); - desc.setParamConfMap(confMap); - desc.setParamRunTimeMap(runtimeMap); - desc.setParamVarsMap(varMap); - desc.setLabelMap(labelMap); - desc.setSourceMap(sourceMap); - desc.setExecutionMap(executionMap); - desc.setSubmitUser(submitUsr); - desc.setProxyUser(proxyUsr); - - return desc; - } - - @Override - protected LinkisJobData buildJobData() { - LinkisJobDataImpl data = new LinkisJobDataImpl(); - if (logListener == null) { - logger.warn("logListener is not registered, will not be able to display log"); - } else { - data.registerincLogListener(logListener); - } - return data; - } - - @Override - protected LinkisJobOperator buildJobOperator() { - LinkisJobOperator oper; - try { - oper = (LinkisJobOperator) JobOperatorFactory.getReusable(AppKeys.REUSABLE_UJES_CLIENT); - } catch (Exception e) { - throw new LinkisClientRuntimeException( - "BLD0012", - ErrorLevel.ERROR, - CommonErrMsg.BuilderBuildErr, - "Failed to get a valid operator.", - e); - } - return oper; - } - - @Override - protected PresentWay buildPresentWay() { - PresentWayImpl presentWay = new PresentWayImpl(); - String outputPath = stdVarAccess.getVar(String.class, AppKeys.LINKIS_CLIENT_COMMON_OUTPUT_PATH); - - presentWay.setPath(outputPath); - presentWay.setMode(PresentModeImpl.STDOUT); - presentWay.setDisplayMetaAndLogo( - stdVarAccess.getVarOrDefault(Boolean.class, AppKeys.LINKIS_COMMON_DIAPLAY_META_LOGO, true)); - if (StringUtils.isNotBlank(outputPath)) { - presentWay.setMode(PresentModeImpl.TEXT_FILE); - } - - return presentWay; - } - - @Override - public LinkisSubmitJob build() { - ((LinkisSubmitJob) targetObj).setJobDesc(buildJobDesc()); - ((LinkisSubmitJob) targetObj).setJobData(buildJobData()); - ((LinkisSubmitJob) targetObj).setAsync(isAsync); - 
targetObj.setOperator(buildJobOperator()); - targetObj.setPresentWay(buildPresentWay()); - return (LinkisSubmitJob) super.build(); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisJobDataImpl.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisJobDataImpl.java deleted file mode 100644 index 227e8c2170..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisJobDataImpl.java +++ /dev/null @@ -1,514 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.interactor.job.data; - -import org.apache.linkis.cli.application.observer.event.LinkisClientEvent; -import org.apache.linkis.cli.application.observer.event.LogStartEvent; -import org.apache.linkis.cli.application.observer.event.TriggerEvent; -import org.apache.linkis.cli.application.observer.listener.LinkisClientListener; -import org.apache.linkis.cli.application.observer.listener.TriggerEventListener; -import org.apache.linkis.cli.application.operator.ujes.LinkisOperResultAdapter; -import org.apache.linkis.cli.common.entity.job.JobStatus; - -import java.util.Arrays; -import java.util.Date; -import java.util.LinkedList; -import java.util.List; -import java.util.concurrent.LinkedBlockingDeque; - -public class LinkisJobDataImpl - implements LinkisJobData, LinkisLogData, LinkisResultData, Cloneable { - - private String jobID; - private String user; - private JobStatus jobStatus = null; - private String message; - private Exception exception; - private String execID; - private float progress = 0.0f; - private Boolean incLogMode; - private LinkedBlockingDeque logBuffer = new LinkedBlockingDeque(); - private String logPath; // remote path for job log - private Integer nextLogLineIdx; - private Boolean hasNextLogLine; - private String resultLocation; - private String[] resultSetPaths = null; // remote paths for job result set - private LinkedBlockingDeque resultContent = new LinkedBlockingDeque<>(); - private Boolean hasNextResultPage; - private Integer errCode = null; - private String errDesc = null; - private boolean success = false; - private String instance; - private String umUser; - private String simpleExecId; - private String executionCode; - private String engineType; - private String runType; - private Long costTime; - private Date createdTime; - private Date updatedTime; - private Date engineStartTime; - private String executeApplicationName; - private String requestApplicationName; - - private LinkisClientEvent 
logstartEvent = new LogStartEvent(); - private TriggerEvent logFinevent = new TriggerEvent(); - private TriggerEventListener logFinListener = new TriggerEventListener(); - private TriggerEvent resultFinEvent = new TriggerEvent(); - private TriggerEventListener resultFinListener = new TriggerEventListener(); - - private boolean hasResult = true; - - { - logFinevent.register(logFinListener); - resultFinEvent.register(resultFinListener); - } - - @Override - public String getJobID() { - return jobID; - } - - public void setJobId(String jobId) { - this.jobID = jobId; - } - - @Override - public String getUser() { - return user; - } - - public void setUser(String user) { - this.user = user; - } - - @Override - public JobStatus getJobStatus() { - return jobStatus; - } - - public void setJobStatus(JobStatus jobStatus) { - this.jobStatus = jobStatus; - } - - @Override - public String getMessage() { - return message; - } - - @Override - public void setMessage(String message) { - this.message = message; - } - - @Override - public Exception getException() { - return exception; - } - - @Override - public void setException(Exception exception) { - this.exception = exception; - } - - @Override - public final String getExecID() { - return execID; - } - - public final void setExecID(String execID) { - this.execID = execID; - } - - @Override - public final float getJobProgress() { - return progress; - } - - public final void setJobProgress(float progress) { - this.progress = progress; - } - - @Override - public final String getLogPath() { - return logPath; - } - - public final void setLogPath(String logPath) { - this.logPath = logPath; - } - - @Override - public final String getResultLocation() { - return resultLocation; - } - - public final void setResultLocation(String resultLocation) { - this.resultLocation = resultLocation; - } - - @Override - public String[] getResultSetPaths() { - return resultSetPaths; - } - - public final void setResultSetPaths(String[] resultSetPaths) { - 
this.resultSetPaths = resultSetPaths; - } - - @Override - public Integer getErrCode() { - return errCode; - } - - public void setErrCode(Integer errCode) { - this.errCode = errCode; - } - - @Override - public String getErrDesc() { - return errDesc; - } - - public void setErrDesc(String errDesc) { - this.errDesc = errDesc; - } - - @Override - public void registerincLogListener(LinkisClientListener observer) { - this.logstartEvent.register(observer); - } - - @Override - public void notifyLogListener() { - if (this.logstartEvent.isRegistered()) { - logstartEvent.notifyObserver(logstartEvent, this); - } - } - - @Override - public boolean isIncLogMode() { - return this.incLogMode; - } - - @Override - public void setIncLogMode(boolean incLogMode) { - this.incLogMode = incLogMode; - } - - @Override - public String consumeLog() { - List logs = new LinkedList<>(); - this.logBuffer.drainTo(logs, this.logBuffer.size()); - StringBuilder tmp = new StringBuilder(); - for (String str : logs) { - tmp.append(str); - } - return tmp.toString(); - } - - public void appendLog(String log) { - this.logBuffer.add(log); - } - - @Override - public Integer getNextLogLineIdx() { - return nextLogLineIdx; - } - - public void setNextLogLineIdx(Integer nextLogLineIdx) { - this.nextLogLineIdx = nextLogLineIdx; - } - - @Override - public Boolean hasNextLogLine() { - return hasNextLogLine; - } - - @Override - public void setHasNextLogLine(Boolean hasNextLogLine) { - this.hasNextLogLine = hasNextLogLine; - } - - @Override - public List consumeResultContent() { - List ret = new LinkedList<>(); - resultContent.drainTo(ret, resultContent.size()); - return ret; - } - - public void appendResultContent(LinkisResultSet resultContent) { - this.resultContent.add(resultContent); - } - - @Override - public Boolean hasNextResultPage() { - return hasNextResultPage; - } - - public void setHasNextResultPage(Boolean hasNextResultPage) { - this.hasNextResultPage = hasNextResultPage; - } - - @Override - public void 
sendLogFin() { - if (this.logFinevent != null && this.logFinevent.isRegistered()) { - this.logFinevent.notifyObserver(resultFinEvent, null); - } - } - - @Override - public boolean logFinReceived() { - return this.logFinListener.isTriggered(); - } - - @Override - public void sendResultFin() { - if (this.resultFinEvent != null && this.resultFinEvent.isRegistered()) { - this.resultFinEvent.notifyObserver(resultFinEvent, null); - } - } - - @Override - public boolean resultFinReceived() { - return this.resultFinListener.isTriggered(); - } - - @Override - public boolean hasResult() { - return hasResult; - } - - @Override - public void setHasResult(boolean hasResult) { - this.hasResult = hasResult; - } - - @Override - public boolean isSuccess() { - return success; - } - - @Override - public void setSuccess(boolean success) { - this.success = success; - } - - public String getInstance() { - return instance; - } - - public void setInstance(String instance) { - this.instance = instance; - } - - public String getUmUser() { - return umUser; - } - - public void setUmUser(String umUser) { - this.umUser = umUser; - } - - public String getSimpleExecId() { - return simpleExecId; - } - - public void setSimpleExecId(String simpleExecId) { - this.simpleExecId = simpleExecId; - } - - public String getExecutionCode() { - return executionCode; - } - - public void setExecutionCode(String executionCode) { - this.executionCode = executionCode; - } - - public String getEngineType() { - return engineType; - } - - public void setEngineType(String engineType) { - this.engineType = engineType; - } - - public String getRunType() { - return runType; - } - - public void setRunType(String runType) { - this.runType = runType; - } - - public Long getCostTime() { - return costTime; - } - - public void setCostTime(Long costTime) { - this.costTime = costTime; - } - - public Date getCreatedTime() { - return createdTime; - } - - public void setCreatedTime(Date createdTime) { - this.createdTime = 
createdTime; - } - - public Date getUpdatedTime() { - return updatedTime; - } - - public void setUpdatedTime(Date updatedTime) { - this.updatedTime = updatedTime; - } - - public Date getEngineStartTime() { - return engineStartTime; - } - - public void setEngineStartTime(Date engineStartTime) { - this.engineStartTime = engineStartTime; - } - - public String getExecuteApplicationName() { - return executeApplicationName; - } - - public void setExecuteApplicationName(String executeApplicationName) { - this.executeApplicationName = executeApplicationName; - } - - public String getRequestApplicationName() { - return requestApplicationName; - } - - public void setRequestApplicationName(String requestApplicationName) { - this.requestApplicationName = requestApplicationName; - } - - @Override - public void updateByOperResult(LinkisOperResultAdapter adapter) { - if (adapter.getJobID() != null) { - setJobId(adapter.getJobID()); - } - if (adapter.getUser() != null) { - setUser(adapter.getUser()); - } - if (adapter.getJobStatus() != null) { - setJobStatus(adapter.getJobStatus()); - } - if (adapter.getStrongerExecId() != null) { - setExecID(adapter.getStrongerExecId()); - } - if (adapter.getJobProgress() != null) { - setJobProgress(adapter.getJobProgress()); - } - if (adapter.getLogPath() != null) { - setLogPath(adapter.getLogPath()); - } - if (adapter.getResultLocation() != null) { - setResultLocation(adapter.getResultLocation()); - } - if (adapter.getResultSetPaths() != null) { - setResultSetPaths(adapter.getResultSetPaths()); - } - if (adapter.getErrCode() != null) { - setErrCode(adapter.getErrCode()); - } - if (adapter.getErrDesc() != null) { - setErrDesc(adapter.getErrDesc()); - } - if (adapter.getLog() != null - && adapter.getNextLogLine() != null - && adapter.hasNextLogLine() != null) { - setNextLogLineIdx(adapter.getNextLogLine()); - setHasNextLogLine(adapter.hasNextLogLine()); - appendLog(adapter.getLog()); - } - if (adapter.getResultContent() != null && 
adapter.resultHasNextPage() != null) { - setHasNextResultPage(adapter.resultHasNextPage()); - appendResultContent(adapter.getResultContent()); - } - if (adapter.getInstance() != null) { - setInstance(adapter.getInstance()); - } - if (adapter.getUmUser() != null) { - setUmUser(adapter.getUmUser()); - } - if (adapter.getSimpleExecId() != null) { - setSimpleExecId(adapter.getSimpleExecId()); - } - if (adapter.getExecutionCode() != null) { - setExecutionCode(adapter.getExecutionCode()); - } - if (adapter.getEngineType() != null) { - setEngineType(adapter.getEngineType()); - } - if (adapter.getRunType() != null) { - setRunType(adapter.getRunType()); - } - if (adapter.getCostTime() != null) { - setCostTime(adapter.getCostTime()); - } - if (adapter.getCreatedTime() != null) { - setCreatedTime(adapter.getCreatedTime()); - } - if (adapter.getUpdatedTime() != null) { - setUpdatedTime(adapter.getUpdatedTime()); - } - if (adapter.getEngineStartTime() != null) { - setEngineStartTime(adapter.getEngineStartTime()); - } - if (adapter.getExecuteApplicationName() != null) { - setExecuteApplicationName(adapter.getExecuteApplicationName()); - } - if (adapter.getRequestApplicationName() != null) { - setRequestApplicationName(adapter.getRequestApplicationName()); - } - } - - @Override - public LinkisJobDataImpl clone() throws CloneNotSupportedException { - LinkisJobDataImpl ret = (LinkisJobDataImpl) super.clone(); - if (logBuffer != null) { - ret.logBuffer = new LinkedBlockingDeque(this.logBuffer); - } - if (this.resultContent != null) { - ret.resultContent = new LinkedBlockingDeque<>(); - for (LinkisResultSet r1 : resultContent) { - ret.resultContent.add(r1.clone()); - } - } - if (this.resultSetPaths != null) { - ret.setResultSetPaths(Arrays.copyOf(this.resultSetPaths, this.resultSetPaths.length)); - } - /* - These be shared and hence should not be deep copied. 
- */ - ret.logFinevent = this.logFinevent; - ret.logFinListener = this.logFinListener; - ret.resultFinEvent = this.resultFinEvent; - ret.resultFinListener = this.resultFinListener; - - return ret; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisLogData.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisLogData.java deleted file mode 100644 index 3df7cc5a47..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisLogData.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.interactor.job.data; - -import org.apache.linkis.cli.application.observer.listener.LinkisClientListener; - -public interface LinkisLogData extends LinkisJobData, Cloneable { - - void notifyLogListener(); - - void registerincLogListener(LinkisClientListener observer); - - String getLogPath(); - - Integer getNextLogLineIdx(); - - void setHasNextLogLine(Boolean hasNextLog); - - Boolean hasNextLogLine(); - - String consumeLog(); - - boolean isIncLogMode(); - - /* - incLogMode = true: for sync-submission, wait for job complete while get incremental log - incLogMode = false: for async-submission, output all log we have currently - */ - void setIncLogMode(boolean incLogMode); - - void sendLogFin(); - - boolean logFinReceived(); - - LinkisLogData clone() throws CloneNotSupportedException; -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisOnceJobData.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisOnceJobData.java deleted file mode 100644 index 89130b62af..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisOnceJobData.java +++ /dev/null @@ -1,242 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.job.data; - -import org.apache.linkis.cli.application.observer.listener.LinkisClientListener; -import org.apache.linkis.cli.application.operator.ujes.LinkisOperResultAdapter; -import org.apache.linkis.cli.common.entity.job.JobStatus; - -import java.util.List; - -public class LinkisOnceJobData implements LinkisJobData, LinkisLogData, LinkisResultData { - - private SimpleOnceJobAdapter onceJobAdapter; - - private boolean hasResult = true; - - public SimpleOnceJobAdapter getOnceJobAdapter() { - return onceJobAdapter; - } - - public void setOnceJobAdapter(SimpleOnceJobAdapter onceJobAdapter) { - this.onceJobAdapter = onceJobAdapter; - } - - @Override - public void registerincLogListener(LinkisClientListener observer) { - onceJobAdapter.registerincLogListener(observer); - } - - @Override - public void notifyLogListener() { - onceJobAdapter.notifyLogListener(); - } - - @Override - public boolean isIncLogMode() { - return onceJobAdapter.isIncLogMode(); - } - - @Override - public void setIncLogMode(boolean incLogMode) { - onceJobAdapter.setIncLogMode(incLogMode); - } - - @Override - public String consumeLog() { - return onceJobAdapter.consumeLog(); - } - - public void appendLog(String log) { - onceJobAdapter.appendLog(log); - } - - @Override - public final String getLogPath() { - return onceJobAdapter.getLogPath(); - } - - public final void setLogPath(String logPath) { - onceJobAdapter.setLogPath(logPath); - } - - @Override - public Integer getNextLogLineIdx() { - return 
onceJobAdapter.getNextLogLineIdx(); - } - - public void setNextLogLineIdx(Integer nextLogLineIdx) { - onceJobAdapter.setNextLogLineIdx(nextLogLineIdx); - } - - @Override - public Boolean hasNextLogLine() { - return onceJobAdapter.hasNextLogLine(); - } - - @Override - public void setHasNextLogLine(Boolean hasNextLogLine) { - onceJobAdapter.setHasNextLogLine(hasNextLogLine); - } - - @Override - public List consumeResultContent() { - return onceJobAdapter.consumeResultContent(); - } - - public void appendResultContent(LinkisResultSet resultContent) { - onceJobAdapter.appendResultContent(resultContent); - } - - @Override - public Boolean hasNextResultPage() { - return onceJobAdapter.hasNextResultPage(); - } - - public void setHasNextResultPage(Boolean hasNextResultPage) { - onceJobAdapter.setHasNextResultPage(hasNextResultPage); - } - - @Override - public final String getResultLocation() { - return onceJobAdapter.getResultLocation(); - } - - public final void setResultLocation(String resultLocation) { - onceJobAdapter.setResultLocation(resultLocation); - } - - @Override - public String[] getResultSetPaths() { - return onceJobAdapter.getResultSetPaths(); - } - - public final void setResultSetPaths(String[] resultSetPaths) { - onceJobAdapter.setResultSetPaths(resultSetPaths); - } - - @Override - public void sendLogFin() { - onceJobAdapter.sendLogFin(); - } - - @Override - public boolean logFinReceived() { - return onceJobAdapter.logFinReceived(); - } - - @Override - public void sendResultFin() { - onceJobAdapter.sendResultFin(); - } - - @Override - public boolean resultFinReceived() { - return onceJobAdapter.resultFinReceived(); - } - - @Override - public boolean hasResult() { - return hasResult; - } - - @Override - public void setHasResult(boolean hasResult) { - this.hasResult = hasResult; - } - - @Override - public JobStatus getJobStatus() { - return onceJobAdapter.getJobStatus(); - } - - public void setJobStatus(JobStatus jobStatus) { - 
onceJobAdapter.setJobStatus(jobStatus); - } - - @Override - public String getJobID() { - return onceJobAdapter.getJobID(); - } - - @Override - public String getUser() { - return onceJobAdapter.getUser(); - } - - @Override - public String getMessage() { - return onceJobAdapter.getMessage(); - } - - @Override - public void setMessage(String message) { - onceJobAdapter.setMessage(message); - } - - @Override - public Exception getException() { - return onceJobAdapter.getException(); - } - - @Override - public void setException(Exception e) { - onceJobAdapter.setException(e); - } - - @Override - public String getExecID() { - return onceJobAdapter.getJobID(); - } // No Need - - @Override - public float getJobProgress() { - return 0; - } - - @Override - public Integer getErrCode() { - return onceJobAdapter.getErrCode(); - } - - @Override - public String getErrDesc() { - return onceJobAdapter.getErrDesc(); - } - - @Override - public boolean isSuccess() { - return onceJobAdapter.isSuccess(); - } - - @Override - public void setSuccess(boolean success) { - onceJobAdapter.setSuccess(success); - } - - @Override - public void updateByOperResult(LinkisOperResultAdapter adapter) { - // No need - } - - @Override - public LinkisOnceJobData clone() throws CloneNotSupportedException { - throw new CloneNotSupportedException(); - // return null; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/SimpleOnceJobAdapter.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/SimpleOnceJobAdapter.java deleted file mode 100644 index 69a76d6240..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/SimpleOnceJobAdapter.java +++ /dev/null @@ -1,377 +0,0 @@ -/* - * Licensed to the 
Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.job.data; - -import org.apache.linkis.cli.application.constants.AppConstants; -import org.apache.linkis.cli.application.constants.AppKeys; -import org.apache.linkis.cli.application.constants.LinkisKeys; -import org.apache.linkis.cli.application.interactor.job.LinkisJobStatus; -import org.apache.linkis.cli.application.interactor.job.desc.LinkisOnceDesc; -import org.apache.linkis.cli.application.observer.event.LinkisClientEvent; -import org.apache.linkis.cli.application.observer.event.LogStartEvent; -import org.apache.linkis.cli.application.observer.event.TriggerEvent; -import org.apache.linkis.cli.application.observer.listener.LinkisClientListener; -import org.apache.linkis.cli.application.observer.listener.TriggerEventListener; -import org.apache.linkis.cli.application.operator.once.OnceJobConstants; -import org.apache.linkis.cli.application.operator.ujes.LinkisOperResultAdapter; -import org.apache.linkis.cli.application.operator.ujes.UJESClientFactory; -import org.apache.linkis.cli.common.entity.job.JobStatus; -import org.apache.linkis.cli.common.entity.var.VarAccess; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; 
-import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.computation.client.LinkisJobBuilder$; -import org.apache.linkis.computation.client.once.simple.SimpleOnceJob; -import org.apache.linkis.computation.client.once.simple.SimpleOnceJobBuilder; -import org.apache.linkis.computation.client.once.simple.SubmittableSimpleOnceJob; -import org.apache.linkis.computation.client.operator.impl.EngineConnLogOperator; -import org.apache.linkis.computation.client.operator.impl.EngineConnLogs; - -import org.apache.commons.lang3.StringUtils; - -import java.util.LinkedList; -import java.util.List; -import java.util.concurrent.LinkedBlockingDeque; - -public class SimpleOnceJobAdapter implements LinkisLogData { - LinkisJobStatus jobStatus = LinkisJobStatus.UNSUBMITTED; - EngineConnLogOperator logOperator = null; - private String serverUrl; - private SimpleOnceJob onceJob; - private String engineTypeForECM; - private String message; - private Exception exception; - private boolean success; - private Boolean incLogMode; - private LinkedBlockingDeque logBuffer = new LinkedBlockingDeque(); - // private String logPath; // remote path for job log - // private Integer nextLogLineIdx = 0; - private Boolean hasNextLogLine = true; - private String resultLocation; - private String[] resultSetPaths = null; // remote paths for job result set - private LinkedBlockingDeque resultContent = new LinkedBlockingDeque<>(); - private Boolean hasNextResultPage; - private LinkisClientEvent logstartEvent = new LogStartEvent(); - private TriggerEvent logFinEvent = new TriggerEvent(); - private TriggerEventListener logFinListener = new TriggerEventListener(); - private TriggerEvent resultFinEvent = new TriggerEvent(); - private TriggerEventListener resultFinListener = new TriggerEventListener(); - - { - logFinEvent.register(logFinListener); - resultFinEvent.register(resultFinListener); - } - - 
public void init(LinkisOnceDesc desc) { - VarAccess stdVarAccess = desc.getStdVarAccess(); - VarAccess sysVarAccess = desc.getSysVarAccess(); - - serverUrl = stdVarAccess.getVar(String.class, AppKeys.LINKIS_COMMON_GATEWAY_URL); - - LinkisJobBuilder$.MODULE$.setDefaultClientConfig( - UJESClientFactory.generateDWSClientConfig(stdVarAccess, sysVarAccess)); - LinkisJobBuilder$.MODULE$.setDefaultUJESClient( - UJESClientFactory.getReusable(stdVarAccess, sysVarAccess)); - - String engineTypeRaw = (String) desc.getLabelMap().get(LinkisKeys.KEY_ENGINETYPE); - engineTypeForECM = engineTypeRaw; - - if (StringUtils.isNotBlank(engineTypeRaw)) { - engineTypeForECM = StringUtils.split(engineTypeRaw, "-")[0]; - } else { - engineTypeForECM = ""; - } // TODO: remove parsing and let server side parse engineType - - onceJob = - new SimpleOnceJobBuilder() - .setCreateService(AppConstants.LINKIS_CLI) - .addExecuteUser(desc.getProxyUser()) - .setStartupParams(desc.getParamConfMap()) - .setLabels(desc.getLabelMap()) - .setRuntimeParams(desc.getParamRunTimeMap()) - .setSource(desc.getSourceMap()) - .setVariableMap(desc.getParamVarsMap()) - .setJobContent(desc.getJobContentMap()) - .build(); - } - - public String getServerUrl() { - return serverUrl; - } - - public SimpleOnceJob getOnceJob() { - return onceJob; - } - - public void setOnceJob(SimpleOnceJob onceJob) { - this.onceJob = onceJob; - } - - private void panicIfNull(Object obj) { - if (obj == null) { - throw new LinkisClientExecutionException( - "EXE0040", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "Instance of " + obj.getClass().getCanonicalName() + " is null"); - } - } - - public void submit() { - panicIfNull(onceJob); - if (!(onceJob instanceof SubmittableSimpleOnceJob)) { - throw new LinkisClientExecutionException( - "EXE0041", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "onceJob is not properly initiated"); - } - ((SubmittableSimpleOnceJob) onceJob).submit(); - } - - public void kill() { - panicIfNull(onceJob); - 
onceJob.kill(); - } - - public String getJobID() { - return onceJob.getId(); - } - - @Override - public String getUser() { - return "TODO"; - } - - public void updateStatus() { - panicIfNull(onceJob); - String status = onceJob.getStatus(); - panicIfNull(status); - jobStatus = LinkisJobStatus.convertFromNodeStatusString(onceJob.getStatus()); - } - - public LinkisJobStatus getJobStatus() { - return this.jobStatus; - } - - public void setJobStatus(JobStatus jobStatus) { - this.jobStatus = (LinkisJobStatus) jobStatus; - } - - public void waitForComplete() { - panicIfNull(onceJob); - onceJob.waitForCompleted(); - } - - public void queryJobLogOneIteration() { - panicIfNull(onceJob); - updateStatus(); - if (logOperator == null) { - logOperator = - (EngineConnLogOperator) onceJob.getOperator(EngineConnLogOperator.OPERATOR_NAME()); - logOperator.setECMServiceInstance( - ((SubmittableSimpleOnceJob) onceJob).getECMServiceInstance()); - logOperator.setEngineConnType(engineTypeForECM); - // logOperator.setPageSize(OnceJobConstants.MAX_LOG_SIZE_ONCE); - logOperator.setIgnoreKeywords(OnceJobConstants.LOG_IGNORE_KEYWORDS); - } - EngineConnLogs logs = - (EngineConnLogs) logOperator.apply(); // for some reason we have to add type conversion, - // otherwise mvn testCompile fails - StringBuilder logBuilder = new StringBuilder(); - for (String log : logs.logs()) { - logBuilder.append(log).append(System.lineSeparator()); - } - appendLog(logBuilder.toString()); - if ((logs.logs() == null || logs.logs().size() <= 0) && jobStatus.isJobFinishedState()) { - setHasNextLogLine(false); - } - // System.out.println(logs.logs().size()); - } - - public void registerincLogListener(LinkisClientListener observer) { - this.logstartEvent.register(observer); - } - - public void notifyLogListener() { - if (this.logstartEvent.isRegistered()) { - logstartEvent.notifyObserver(logstartEvent, this); - } - } - - public boolean isIncLogMode() { - return this.incLogMode; - } - - public void setIncLogMode(boolean 
incLogMode) { - this.incLogMode = incLogMode; - } - - public String consumeLog() { - List logs = new LinkedList<>(); - this.logBuffer.drainTo(logs, this.logBuffer.size()); - StringBuilder tmp = new StringBuilder(); - for (String str : logs) { - tmp.append(str); - } - return tmp.toString(); - } - - public void appendLog(String log) { - this.logBuffer.add(log); - } - - public final String getLogPath() { - return null; - } - - public final void setLogPath(String logPath) { - return; - } - - public Integer getNextLogLineIdx() { - return null; - } - - public void setNextLogLineIdx(Integer nextLogLineIdx) { - return; - } - - public Boolean hasNextLogLine() { - return hasNextLogLine; - } - - public void setHasNextLogLine(Boolean hasNextLogLine) { - this.hasNextLogLine = hasNextLogLine; - } - - public List consumeResultContent() { - List ret = new LinkedList<>(); - resultContent.drainTo(ret, resultContent.size()); - return ret; - } - - public void appendResultContent(LinkisResultSet resultContent) { - this.resultContent.add(resultContent); - } - - public Boolean hasNextResultPage() { - return hasNextResultPage; - } - - public void setHasNextResultPage(Boolean hasNextResultPage) { - this.hasNextResultPage = hasNextResultPage; - } - - public final String getResultLocation() { - return resultLocation; - } - - public final void setResultLocation(String resultLocation) { - this.resultLocation = resultLocation; - } - - public String[] getResultSetPaths() { - return resultSetPaths; - } - - public final void setResultSetPaths(String[] resultSetPaths) { - this.resultSetPaths = resultSetPaths; - } - - public void sendLogFin() { - if (this.logFinEvent != null && this.logFinEvent.isRegistered()) { - this.logFinEvent.notifyObserver(resultFinEvent, null); - } - } - - public boolean logFinReceived() { - return this.logFinListener.isTriggered(); - } - - public void sendResultFin() { - if (this.resultFinEvent != null && this.resultFinEvent.isRegistered()) { - 
this.resultFinEvent.notifyObserver(resultFinEvent, null); - } - } - - public boolean resultFinReceived() { - return this.resultFinListener.isTriggered(); - } - - @Override - public String getMessage() { - return message; - } - - @Override - public void setMessage(String message) { - this.message = message; - } - - @Override - public Exception getException() { - return exception; - } - - @Override - public void setException(Exception e) { - this.exception = e; - } - - @Override - public String getExecID() { - return getJobID(); - } // No Need - - @Override - public float getJobProgress() { - return 0; - } - - @Override - public Integer getErrCode() { - return null; - } - - @Override - public String getErrDesc() { - return null; - } - - @Override - public boolean isSuccess() { - return success; - } - - @Override - public void setSuccess(boolean success) { - this.success = success; - } - - @Override - public void updateByOperResult(LinkisOperResultAdapter adapter) { - // No need - } - - @Override - public LinkisLogData clone() throws CloneNotSupportedException { - throw new CloneNotSupportedException(); - // return null; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/OperatorUtils.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/OperatorUtils.java deleted file mode 100644 index 767d44aea1..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/OperatorUtils.java +++ /dev/null @@ -1,70 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.operator; - -public class OperatorUtils { - public static int getNumOfLines(String str) { - if (str == null || str.length() == 0) { - return 0; - } - int lines = 1; - int len = str.length(); - for (int pos = 0; pos < len; pos++) { - char c = str.charAt(pos); - if (c == '\r') { - lines++; - if (pos + 1 < len && str.charAt(pos + 1) == '\n') { - pos++; - } - } else if (c == '\n') { - lines++; - } - } - return lines; - } - - public static int getFirstIndexSkippingLines(String str, Integer lines) { - if (str == null || str.length() == 0 || lines < 0) { - return -1; - } - if (lines == 0) { - return 0; - } - - int curLineIdx = 0; - int len = str.length(); - for (int pos = 0; pos < len; pos++) { - char c = str.charAt(pos); - if (c == '\r') { - curLineIdx++; - if (pos + 1 < len && str.charAt(pos + 1) == '\n') { - pos++; - } - } else if (c == '\n') { - curLineIdx++; - } else { - continue; - } - - if (curLineIdx >= lines) { - return pos + 1; - } - } - return -1; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/LinkisResultInfoPresenter.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/LinkisResultInfoPresenter.java deleted file mode 100644 index 
5a953fa5d7..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/LinkisResultInfoPresenter.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.present; - -import org.apache.linkis.cli.application.present.model.LinkisResultInfoModel; -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.entity.present.PresentWay; -import org.apache.linkis.cli.common.entity.present.Presenter; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.PresenterException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.present.PresentModeImpl; -import org.apache.linkis.cli.core.present.display.DisplayOperFactory; -import org.apache.linkis.cli.core.present.display.data.StdoutDisplayData; -import org.apache.linkis.cli.core.utils.LogUtils; - -import org.apache.commons.lang3.StringUtils; - -public class LinkisResultInfoPresenter implements Presenter { - @Override - public void present(Model model, PresentWay presentWay) { - if (!(model instanceof LinkisResultInfoModel)) { - throw new PresenterException( - "PST0001", - ErrorLevel.ERROR, - CommonErrMsg.PresenterErr, - "Input model for \"LinkisResultInfoPresenter\" is not instance of \"LinkisResultInfoModel\""); - } - DisplayOperFactory.getDisplayOper(PresentModeImpl.STDOUT) - .doOutput(new StdoutDisplayData(formatResultIndicator((LinkisResultInfoModel) model))); - } - - protected String formatResultIndicator(LinkisResultInfoModel model) { - StringBuilder infoBuilder = new StringBuilder(); - String extraMsgStr = ""; - - if (model.getMessage() != null) { - extraMsgStr = model.getMessage().toString(); - } - if (model.getJobStatus().isJobSuccess()) { - - LogUtils.getInformationLogger().info("Job execute successfully! 
Will try get execute result"); - infoBuilder - .append("============Result:================") - .append(System.lineSeparator()) - .append("TaskId:") - .append(model.getJobID()) - .append(System.lineSeparator()) - .append("ExecId: ") - .append(model.getExecID()) - .append(System.lineSeparator()) - .append("User:") - .append(model.getUser()) - .append(System.lineSeparator()) - .append("Current job status:") - .append(model.getJobStatus()) - .append(System.lineSeparator()) - .append("extraMsg: ") - .append(extraMsgStr) - .append(System.lineSeparator()) - .append("result: ") - .append(extraMsgStr) - .append(System.lineSeparator()); - } else if (model.getJobStatus().isJobFinishedState()) { - LogUtils.getInformationLogger().info("Job failed! Will not try get execute result."); - infoBuilder - .append("============Result:================") - .append(System.lineSeparator()) - .append("TaskId:") - .append(model.getJobID()) - .append(System.lineSeparator()) - .append("ExecId: ") - .append(model.getExecID()) - .append(System.lineSeparator()) - .append("User:") - .append(model.getUser()) - .append(System.lineSeparator()) - .append("Current job status:") - .append(model.getJobStatus()) - .append(System.lineSeparator()) - .append("extraMsg: ") - .append(extraMsgStr) - .append(System.lineSeparator()); - if (model.getErrCode() != null) { - infoBuilder.append("errCode: ").append(model.getErrCode()).append(System.lineSeparator()); - } - if (StringUtils.isNotBlank(model.getErrDesc())) { - infoBuilder.append("errDesc: ").append(model.getErrDesc()).append(System.lineSeparator()); - } - } else { - throw new PresenterException( - "PST0011", - ErrorLevel.ERROR, - CommonErrMsg.PresenterErr, - "Job is not completed but triggered ResultPresenter"); - } - return infoBuilder.toString(); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/LinkisResultPresenter.java 
b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/LinkisResultPresenter.java deleted file mode 100644 index b5e39c4e2e..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/LinkisResultPresenter.java +++ /dev/null @@ -1,232 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.present; - -import org.apache.linkis.cli.application.constants.AppConstants; -import org.apache.linkis.cli.application.interactor.job.data.LinkisResultSet; -import org.apache.linkis.cli.application.present.model.LinkisResultModel; -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.entity.present.PresentWay; -import org.apache.linkis.cli.common.entity.present.Presenter; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.PresenterException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.present.PresentModeImpl; -import org.apache.linkis.cli.core.present.PresentWayImpl; -import org.apache.linkis.cli.core.present.display.DisplayOperFactory; -import org.apache.linkis.cli.core.present.display.DisplayOperator; -import org.apache.linkis.cli.core.present.display.data.FileDisplayData; -import org.apache.linkis.cli.core.utils.CommonUtils; -import org.apache.linkis.cli.core.utils.LogUtils; - -import org.apache.commons.lang3.StringUtils; - -import java.text.MessageFormat; -import java.util.*; - -public class LinkisResultPresenter implements Presenter { - - @Override - public void present(Model model, PresentWay presentWay) { - if (!(model instanceof LinkisResultModel)) { - throw new PresenterException( - "PST0001", - ErrorLevel.ERROR, - CommonErrMsg.PresenterErr, - "Input model for \"LinkisResultPresenter\" is not instance of \"LinkisResultModel\""); - } - if (!(presentWay instanceof PresentWayImpl)) { - throw new PresenterException( - "PST0002", - ErrorLevel.ERROR, - CommonErrMsg.PresenterErr, - "Input PresentWay for \"LinkisResultPresenter\" is not instance of \"PresentWayImpl\""); - } - LinkisResultModel resultModel = (LinkisResultModel) model; - PresentWayImpl presentWay1 = (PresentWayImpl) presentWay; - - if (!resultModel.getJobStatus().isJobSuccess()) { - 
LogUtils.getInformationLogger() - .info("JobStatus is not \'success\'. Will not retrieve result-set."); - return; - } - String msg = ""; - if (resultModel.hasResult()) { - msg = - "Retrieving result-set, may take time if result-set is large, please do not exit program."; - } else { - msg = "Your job has no result."; - } - LogUtils.getInformationLogger().info(msg); - - final DisplayOperator displayOperator = - DisplayOperFactory.getDisplayOper( - presentWay1.getMode()); // currently we don't allow printing log to file here - - int preIdx = -1; - StringBuilder resultSb = new StringBuilder(); - - while (!resultModel.resultFinReceived()) { - preIdx = presentOneIteration(resultModel, preIdx, presentWay1, resultSb, displayOperator); - CommonUtils.doSleepQuietly(500l); - } - presentOneIteration(resultModel, preIdx, presentWay1, resultSb, displayOperator); - - if (presentWay1.getMode() == PresentModeImpl.TEXT_FILE - || StringUtils.isNotBlank(presentWay1.getPath())) { - LogUtils.getInformationLogger() - .info("ResultSet has been successfully written to path: " + presentWay1.getPath()); - } - } - - protected int presentOneIteration( - LinkisResultModel resultModel, - int preIdx, - PresentWayImpl presentWay, - StringBuilder resultSb, - DisplayOperator displayOperator) { - List linkisResultSets = resultModel.consumeResultContent(); - if (linkisResultSets != null && !linkisResultSets.isEmpty()) { - for (LinkisResultSet c : linkisResultSets) { - int idxResultset = c.getResultsetIdx(); - /** - * Notice: we assume result-sets are visited one by one in non-descending order!!! i.e. - * either idxResultset == preIdx or idxResultset - preIdx == 1 i.e. resultsets[0] -> - * resultsets[1] -> ... 
- */ - if (idxResultset - preIdx != 0 && idxResultset - preIdx != 1) { - throw new PresenterException( - "PST0002", - ErrorLevel.ERROR, - CommonErrMsg.PresenterErr, - "Linkis resultsets are visited in descending order or are not visited one-by-one"); - } - - boolean flag = idxResultset > preIdx; - if (presentWay.isDisplayMetaAndLogo()) { - if (idxResultset - preIdx == 1) { - resultSb.setLength(0); - resultSb - .append(MessageFormat.format(AppConstants.RESULTSET_LOGO, idxResultset + 1)) - .append(System.lineSeparator()); - if (c.getResultMeta() != null) { - resultSb - .append(AppConstants.RESULTSET_META_BEGIN_LOGO) - .append(System.lineSeparator()); - resultSb.append(formatResultMeta(c.getResultMeta())); - resultSb.append(AppConstants.RESULTSET_META_END_LOGO).append(System.lineSeparator()); - } - } - } - preIdx = idxResultset; - String contentStr = formatResultContent(c.getResultMeta(), c.getContent()); - if (contentStr != null) { - resultSb.append(contentStr); - } - if (resultSb.length() != 0) { - String resultFileName = - resultModel.getUser() - + "-task-" - + resultModel.getJobID() - + "-result-" - + String.valueOf(idxResultset + 1) - + ".txt"; - displayOperator.doOutput( - new FileDisplayData(presentWay.getPath(), resultFileName, resultSb.toString(), flag)); - resultSb.setLength(0); - } - } - } - return preIdx; - } - - protected String formatResultMeta(List> metaData) { - - StringBuilder outputBuilder = new StringBuilder(); - - if (metaData == null || metaData.size() == 0) { - return null; - } - - List titles = new ArrayList<>(); - - // gather keys as title - for (LinkedHashMap mapElement : metaData) { - if (mapElement == null || mapElement.size() == 0) { - continue; - } - - Set> entrySet = mapElement.entrySet(); - if (entrySet == null) { - break; - } - for (Map.Entry entry : entrySet) { - String key = entry.getKey(); - if (key != null && !titles.contains(key)) { - titles.add(key); - outputBuilder.append(key).append("\t"); - } - } - } - - 
outputBuilder.append(System.lineSeparator()); - - // gather value and print to output - for (LinkedHashMap mapElement : metaData) { - if (mapElement == null || mapElement.size() == 0) { - continue; - } - String candidate; - for (String title : titles) { - if (mapElement.containsKey(title)) { - candidate = mapElement.get(title); - } else { - candidate = "NULL"; - } - outputBuilder.append(candidate).append("\t"); - } - outputBuilder.append(System.lineSeparator()); - } - return outputBuilder.toString(); - } - - protected String formatResultContent( - List> metaData, List> contentData) { - - StringBuilder outputBuilder = new StringBuilder(); - if (contentData == null || contentData.size() == 0) { // finished - return null; - } - - int listLen = contentData.size(); - for (int i = 0; i < listLen; i++) { - List listElement = contentData.get(i); - if (listElement == null || listElement.size() == 0) { - continue; - } - for (String element : listElement) { - outputBuilder.append(element).append("\t"); - } - if (i < listLen - 1) { - outputBuilder.append(System.lineSeparator()); - } - } - - return outputBuilder.toString(); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisJobInfoModel.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisJobInfoModel.java deleted file mode 100644 index acfc392e48..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisJobInfoModel.java +++ /dev/null @@ -1,97 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.present.model; - -import org.apache.linkis.cli.application.interactor.job.data.LinkisJobDataImpl; -import org.apache.linkis.cli.common.entity.job.JobStatus; -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.TransformerException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; - -import org.apache.commons.lang3.exception.ExceptionUtils; - -import java.util.Date; - -public class LinkisJobInfoModel implements Model { - - private String cid; - private String jobId; - private String message; - private String exception; - private String cause; - - private String taskID; - private String instance; - private String simpleExecId; - private String execId; - private String umUser; - private String executionCode; - private String logPath; - private JobStatus status; - private String engineType; - private String runType; - private Long costTime; - private Date createdTime; - private Date updatedTime; - private Date engineStartTime; - private Integer errCode; - private String errMsg; - private String executeApplicationName; - private String requestApplicationName; - private Float progress; - - @Override - public void buildModel(Object data) { - if (!(data instanceof LinkisJobDataImpl)) { - throw new TransformerException( 
- "TFM0010", - ErrorLevel.ERROR, - CommonErrMsg.TransformerException, - "Failed to init LinkisJobInfoModel: " - + data.getClass().getCanonicalName() - + "is not instance of \"LinkisJobDataImpl\""); - } - this.jobId = ((LinkisJobDataImpl) data).getJobID(); - this.message = ((LinkisJobDataImpl) data).getMessage(); - this.taskID = ((LinkisJobDataImpl) data).getJobID(); - this.instance = ((LinkisJobDataImpl) data).getInstance(); - this.simpleExecId = ((LinkisJobDataImpl) data).getSimpleExecId(); - this.execId = ((LinkisJobDataImpl) data).getExecID(); - this.umUser = ((LinkisJobDataImpl) data).getUmUser(); - this.executionCode = ((LinkisJobDataImpl) data).getExecutionCode(); - this.logPath = ((LinkisJobDataImpl) data).getLogPath(); - this.status = ((LinkisJobDataImpl) data).getJobStatus(); - this.engineType = ((LinkisJobDataImpl) data).getEngineType(); - this.runType = ((LinkisJobDataImpl) data).getRunType(); - this.costTime = ((LinkisJobDataImpl) data).getCostTime(); - this.createdTime = ((LinkisJobDataImpl) data).getCreatedTime(); - this.updatedTime = ((LinkisJobDataImpl) data).getUpdatedTime(); - this.engineStartTime = ((LinkisJobDataImpl) data).getEngineStartTime(); - this.errCode = ((LinkisJobDataImpl) data).getErrCode(); - this.errMsg = ((LinkisJobDataImpl) data).getErrDesc(); - this.executeApplicationName = ((LinkisJobDataImpl) data).getExecuteApplicationName(); - this.requestApplicationName = ((LinkisJobDataImpl) data).getRequestApplicationName(); - this.progress = ((LinkisJobDataImpl) data).getJobProgress(); - Exception e = ((LinkisJobDataImpl) data).getException(); - if (e != null) { - this.exception = ExceptionUtils.getMessage(e); - this.cause = ExceptionUtils.getRootCauseMessage(e); - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisJobKillModel.java 
b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisJobKillModel.java deleted file mode 100644 index 95eb7d03c2..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisJobKillModel.java +++ /dev/null @@ -1,63 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.present.model; - -import org.apache.linkis.cli.application.interactor.job.data.LinkisJobDataImpl; -import org.apache.linkis.cli.common.entity.job.JobStatus; -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.TransformerException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; - -import org.apache.commons.lang3.exception.ExceptionUtils; - -public class LinkisJobKillModel implements Model { - - private String cid; - private String jobId; - private String message; - private String exception; - private String cause; - - private String execID; - private String user; - private JobStatus jobStatus; - - @Override - public void buildModel(Object data) { - if (!(data instanceof LinkisJobDataImpl)) { - throw new TransformerException( - "TFM0010", - ErrorLevel.ERROR, - CommonErrMsg.TransformerException, - "Failed to init LinkisJobKillModel: " - + data.getClass().getCanonicalName() - + "is not instance of \"LinkisJobDataImpl\""); - } - this.jobId = ((LinkisJobDataImpl) data).getJobID(); - this.message = ((LinkisJobDataImpl) data).getMessage(); - this.execID = ((LinkisJobDataImpl) data).getExecID(); - this.user = ((LinkisJobDataImpl) data).getUser(); - this.jobStatus = ((LinkisJobDataImpl) data).getJobStatus(); - Exception e = ((LinkisJobDataImpl) data).getException(); - if (e != null) { - this.exception = ExceptionUtils.getMessage(e); - this.cause = ExceptionUtils.getRootCauseMessage(e); - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisResultInfoModel.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisResultInfoModel.java deleted file mode 100644 index bfa0ae6cc8..0000000000 
--- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisResultInfoModel.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.present.model; - -import org.apache.linkis.cli.application.interactor.job.data.LinkisResultData; -import org.apache.linkis.cli.common.entity.job.JobStatus; -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.TransformerException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; - -public class LinkisResultInfoModel implements Model { - private String jobID; - private String execID; - private String user; - private JobStatus jobStatus; - private String message; - private Integer errCode; - private String errDesc; - - @Override - public void buildModel(Object data) { - if (!(data instanceof LinkisResultData)) { - throw new TransformerException( - "TFM0010", - ErrorLevel.ERROR, - CommonErrMsg.TransformerException, - "Failed to init LinkisResultInfoModel: " - + data.getClass().getCanonicalName() - + "is not instance of \"LinkisResultData\""); - } - LinkisResultData jobData = (LinkisResultData) data; - jobID = jobData.getJobID(); - execID = jobData.getExecID(); - user = jobData.getUser(); - jobStatus = jobData.getJobStatus(); - message = jobData.getMessage(); - errCode = jobData.getErrCode(); - errDesc = jobData.getErrDesc(); - } - - public String getJobID() { - return jobID; - } - - public String getExecID() { - return execID; - } - - public String getUser() { - return user; - } - - public JobStatus getJobStatus() { - return jobStatus; - } - - public String getMessage() { - return message; - } - - public Integer getErrCode() { - return errCode; - } - - public String getErrDesc() { - return errDesc; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisSubmitResultModel.java 
b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisSubmitResultModel.java deleted file mode 100644 index 6b622e1453..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisSubmitResultModel.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.present.model; - -import org.apache.linkis.cli.application.interactor.job.data.LinkisJobDataImpl; -import org.apache.linkis.cli.common.entity.job.JobStatus; -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.TransformerException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; - -import org.apache.commons.lang3.exception.ExceptionUtils; - -public class LinkisSubmitResultModel implements Model { - private String jobId; - private JobStatus status; - private String message; - private String exception; - private String cause; - - @Override - public void buildModel(Object data) { - if (!(data instanceof LinkisJobDataImpl)) { - throw new TransformerException( - "TFM0010", - ErrorLevel.ERROR, - CommonErrMsg.TransformerException, - "Failed to init LinkisJobInfoModel: " - + data.getClass().getCanonicalName() - + "is not instance of \"LinkisJobDataImpl\""); - } - this.jobId = ((LinkisJobDataImpl) data).getJobID(); - this.status = ((LinkisJobDataImpl) data).getJobStatus(); - this.message = ((LinkisJobDataImpl) data).getMessage(); - Exception e = ((LinkisJobDataImpl) data).getException(); - if (e != null) { - this.exception = ExceptionUtils.getMessage(e); - this.cause = ExceptionUtils.getRootCauseMessage(e); - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/suite/ExecutionSuite.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/suite/ExecutionSuite.java deleted file mode 100644 index 8b05a7cf78..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/suite/ExecutionSuite.java +++ /dev/null @@ -1,61 +0,0 @@ -/* - * Licensed to the 
Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.suite; - -import org.apache.linkis.cli.common.entity.execution.Execution; -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.result.ResultHandler; - -import java.util.Map; - -public class ExecutionSuite { - Execution execution; - Map jobs; - ResultHandler[] resultHandlers; - - public ExecutionSuite( - Execution execution, Map jobs, ResultHandler... 
resultHandlers) { - this.execution = execution; - this.jobs = jobs; - this.resultHandlers = resultHandlers; - } - - public Execution getExecution() { - return execution; - } - - public void setExecution(Execution execution) { - this.execution = execution; - } - - public Map getJobs() { - return jobs; - } - - public void setJobs(Map jobs) { - this.jobs = jobs; - } - - public ResultHandler[] getResultHandlers() { - return resultHandlers; - } - - public void setResultHandlers(ResultHandler[] resultHandlers) { - this.resultHandlers = resultHandlers; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/suite/ExecutionSuiteFactory.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/suite/ExecutionSuiteFactory.java deleted file mode 100644 index c55ee7b9e4..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/suite/ExecutionSuiteFactory.java +++ /dev/null @@ -1,225 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.suite; - -import org.apache.linkis.cli.application.constants.AppConstants; -import org.apache.linkis.cli.application.constants.AppKeys; -import org.apache.linkis.cli.application.interactor.command.LinkisCmdType; -import org.apache.linkis.cli.application.interactor.job.builder.LinkisManageJobBuilder; -import org.apache.linkis.cli.application.interactor.job.builder.LinkisOnceJobBuilder; -import org.apache.linkis.cli.application.interactor.job.builder.LinkisSubmitJobBuilder; -import org.apache.linkis.cli.application.interactor.job.subtype.LinkisManSubType; -import org.apache.linkis.cli.application.interactor.job.subtype.LinkisSubmitSubType; -import org.apache.linkis.cli.application.interactor.validate.LinkisManageValidator; -import org.apache.linkis.cli.application.interactor.validate.LinkisOnceSubmitValidator; -import org.apache.linkis.cli.application.interactor.validate.LinkisSubmitValidator; -import org.apache.linkis.cli.application.present.LinkisLogPresenter; -import org.apache.linkis.cli.application.present.LinkisResultInfoPresenter; -import org.apache.linkis.cli.application.present.LinkisResultPresenter; -import org.apache.linkis.cli.application.present.model.LinkisJobInfoModel; -import org.apache.linkis.cli.application.present.model.LinkisJobKillModel; -import org.apache.linkis.cli.application.present.model.LinkisResultInfoModel; -import org.apache.linkis.cli.application.present.model.LinkisResultModel; -import org.apache.linkis.cli.application.utils.Utils; -import org.apache.linkis.cli.common.entity.command.CmdType; -import org.apache.linkis.cli.common.entity.execution.Execution; -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.job.JobData; -import org.apache.linkis.cli.common.entity.job.JobDescription; -import org.apache.linkis.cli.common.entity.job.JobSubType; -import org.apache.linkis.cli.common.entity.operator.JobOperator; -import 
org.apache.linkis.cli.common.entity.present.PresentWay; -import org.apache.linkis.cli.common.entity.result.ResultHandler; -import org.apache.linkis.cli.common.entity.validate.Validator; -import org.apache.linkis.cli.common.entity.var.VarAccess; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.execution.AsyncSubmission; -import org.apache.linkis.cli.core.interactor.execution.Help; -import org.apache.linkis.cli.core.interactor.execution.JobManagement; -import org.apache.linkis.cli.core.interactor.execution.SyncSubmission; -import org.apache.linkis.cli.core.interactor.job.JobBuilder; -import org.apache.linkis.cli.core.interactor.result.DefaultResultHandler; -import org.apache.linkis.cli.core.interactor.result.PresentResultHandler; -import org.apache.linkis.cli.core.present.DefaultStdOutPresenter; - -import org.apache.commons.lang3.StringUtils; - -import java.util.HashMap; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ExecutionSuiteFactory { - private static Logger logger = LoggerFactory.getLogger(ExecutionSuiteFactory.class); - - public static ExecutionSuite getSuite( - CmdType cmdType, VarAccess stdVarAccess, VarAccess sysVarAccess) { - - ExecutionSuite suite; - Execution execution; - Map jobs = new HashMap<>(); - JobSubType subType; - JobBuilder jobBuilder; - Validator validator; - ResultHandler defaultHandler = new DefaultResultHandler(); - - /* - Prepare Builders and command-specific components - */ - if (cmdType == LinkisCmdType.UNIVERSAL) { - if (stdVarAccess.hasVar(AppKeys.LINKIS_CLIENT_KILL_OPT)) { - execution = new JobManagement(); - jobBuilder = new LinkisManageJobBuilder(); - PresentResultHandler handler = new PresentResultHandler(); - handler.setPresenter(new DefaultStdOutPresenter()); - 
handler.setModel(new LinkisJobKillModel()); - validator = new LinkisManageValidator(); - subType = LinkisManSubType.KILL; - suite = new ExecutionSuite(execution, jobs, handler, defaultHandler); - } else if (stdVarAccess.hasVar(AppKeys.LINKIS_CLIENT_STATUS_OPT)) { - execution = new JobManagement(); - jobBuilder = new LinkisManageJobBuilder(); - PresentResultHandler handler = new PresentResultHandler(); - handler.setPresenter(new DefaultStdOutPresenter()); - handler.setModel(new LinkisJobInfoModel()); - validator = new LinkisManageValidator(); - subType = LinkisManSubType.STATUS; - suite = new ExecutionSuite(execution, jobs, handler, defaultHandler); - } else if (stdVarAccess.hasVar(AppKeys.LINKIS_CLIENT_LOG_OPT)) { - execution = new JobManagement(); - jobBuilder = new LinkisManageJobBuilder().setLogListener(new LinkisLogPresenter()); - validator = new LinkisManageValidator(); - subType = LinkisManSubType.LOG; - suite = new ExecutionSuite(execution, jobs, null, defaultHandler); - } else if (stdVarAccess.hasVar(AppKeys.LINKIS_CLIENT_RESULT_OPT)) { - execution = new JobManagement(); - jobBuilder = new LinkisManageJobBuilder(); - PresentResultHandler handler = new PresentResultHandler(); - handler.setPresenter(new LinkisResultPresenter()); - handler.setModel(new LinkisResultModel()); - validator = new LinkisManageValidator(); - subType = LinkisManSubType.RESULT; - suite = new ExecutionSuite(execution, jobs, handler, defaultHandler); - } else if (stdVarAccess.hasVar(AppKeys.LINKIS_CLIENT_HELP_OPT)) { - execution = new Help(); - jobs.put( - "help", - new Job() { - @Override - public String getCid() { - return null; - } - - @Override - public CmdType getCmdType() { - return cmdType; - } - - @Override - public JobSubType getSubType() { - return null; - } - - @Override - public JobDescription getJobDesc() { - return null; - } - - @Override - public JobData getJobData() { - return null; - } - - @Override - public JobOperator getJobOperator() { - return null; - } - - @Override 
- public PresentWay getPresentWay() { - return null; - } - }); - return new ExecutionSuite(execution, jobs, null, defaultHandler); - } else { - Boolean asyncSubmission = - stdVarAccess.getVarOrDefault(Boolean.class, AppKeys.LINKIS_CLIENT_ASYNC_OPT, false); - if (asyncSubmission) { - execution = new AsyncSubmission(); - PresentResultHandler handler = new PresentResultHandler(); - handler.setPresenter(new DefaultStdOutPresenter()); - handler.setModel(new LinkisJobInfoModel()); - jobBuilder = new LinkisSubmitJobBuilder().setAsync(true); - subType = LinkisSubmitSubType.SUBMIT; - suite = new ExecutionSuite(execution, jobs, handler, defaultHandler); - validator = new LinkisSubmitValidator(); - } else { - execution = new SyncSubmission(); - subType = LinkisSubmitSubType.SUBMIT; - PresentResultHandler handler1 = new PresentResultHandler(); - handler1.setPresenter(new LinkisResultInfoPresenter()); - handler1.setModel(new LinkisResultInfoModel()); - PresentResultHandler handler2 = new PresentResultHandler(); - handler2.setPresenter(new LinkisResultPresenter()); - handler2.setModel(new LinkisResultModel()); - - String mode = - stdVarAccess.getVarOrDefault( - String.class, AppKeys.LINKIS_CLIENT_MODE_OPT, AppConstants.UJES_MODE); - if (StringUtils.equalsIgnoreCase(mode, AppConstants.ONCE_MODE)) { - jobBuilder = new LinkisOnceJobBuilder().setLogListener(new LinkisLogPresenter()); - ; - validator = new LinkisOnceSubmitValidator(); - } else { - jobBuilder = new LinkisSubmitJobBuilder().setLogListener(new LinkisLogPresenter()); - validator = new LinkisSubmitValidator(); - } - suite = new ExecutionSuite(execution, jobs, handler1, handler2, defaultHandler); - } - } - /* - build job - */ - Job job = - jobBuilder - .setCid(AppConstants.DUMMY_CID) // currently we don't need this - .setCmdType(cmdType) - .setJobSubType(subType) - .setStdVarAccess(stdVarAccess) - .setSysVarAccess(sysVarAccess) - .build(); - logger.info("==========JOB============\n" + Utils.GSON.toJson(job.getJobDesc())); 
- if (validator != null) { - validator.doValidation(job); - } - - jobs.put(job.getCid(), job); - - return suite; - } else { - throw new LinkisClientExecutionException( - "EXE0029", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Command Type is not supported"); - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/utils/Utils.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/utils/Utils.java deleted file mode 100644 index e70c32a37b..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/utils/Utils.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.application.utils; - -import org.apache.commons.lang3.StringUtils; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class Utils { - - public static final Gson GSON = - new GsonBuilder().setPrettyPrinting().disableHtmlEscaping().create(); - private static final Logger logger = LoggerFactory.getLogger(Utils.class); - - public static boolean isValidExecId(String execId) { - boolean ret = false; - if (StringUtils.isNotBlank(execId)) { - ret = true; - } - return ret; - } - - public static String progressInPercentage(float progress) { - return String.valueOf(progress * 100) + "%"; - } - - public static void doSleepQuietly(Long sleepMills) { - try { - Thread.sleep(sleepMills); - } catch (Exception ignore) { - // ignored - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/template/ProcessInputUtil.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/template/ProcessInputUtil.java deleted file mode 100644 index 33af32bb4b..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/template/ProcessInputUtil.java +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.command.template; - -import org.apache.linkis.cli.application.constants.AppConstants; -import org.apache.linkis.cli.application.constants.AppKeys; -import org.apache.linkis.cli.application.data.ProcessedData; -import org.apache.linkis.cli.application.utils.Utils; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.command.Params; -import org.apache.linkis.cli.common.entity.properties.ClientProperties; -import org.apache.linkis.cli.common.entity.var.VarAccess; -import org.apache.linkis.cli.core.constants.CommonConstants; -import org.apache.linkis.cli.core.interactor.command.fitter.SingleTplFitter; -import org.apache.linkis.cli.core.interactor.command.parser.Parser; -import org.apache.linkis.cli.core.interactor.command.parser.SingleCmdParser; -import org.apache.linkis.cli.core.interactor.command.parser.result.ParseResult; -import org.apache.linkis.cli.core.interactor.properties.PropertiesLoader; -import org.apache.linkis.cli.core.interactor.properties.PropsFilesScanner; -import org.apache.linkis.cli.core.interactor.properties.StdPropsLoader; -import org.apache.linkis.cli.core.interactor.properties.reader.PropertiesReader; -import org.apache.linkis.cli.core.interactor.properties.reader.PropsFileReader; -import org.apache.linkis.cli.core.interactor.properties.reader.SysEnvReader; -import org.apache.linkis.cli.core.interactor.properties.reader.SysPropsReader; -import org.apache.linkis.cli.core.interactor.validate.ParsedTplValidator; -import 
org.apache.linkis.cli.core.interactor.var.StdVarAccess; -import org.apache.linkis.cli.core.interactor.var.SysVarAccess; -import org.apache.linkis.cli.core.utils.LogUtils; - -import org.apache.commons.lang3.StringUtils; - -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class ProcessInputUtil { - private static Logger logger = LoggerFactory.getLogger(ProcessInputUtil.class); - - public static ProcessedData generateProcessedData(String[] cmdStr, CmdTemplate template) { - /* - user input - */ - Parser parser = - new SingleCmdParser() - .setMapper(null) - .setTemplate(template) - .setFitter(new SingleTplFitter()); - - ParseResult result = parser.parse(cmdStr); - - ParsedTplValidator parsedTplValidator = new ParsedTplValidator(); - parsedTplValidator.doValidation(result.getParsedTemplate()); - - Params params = result.getParams(); - logger.debug("==========params============\n" + Utils.GSON.toJson(params)); - - Map propertiesMap = new HashMap<>(); - /* - default config, -Dconf.root & -Dconf.file specifies config path - */ - System.setProperty("conf.root", "src/test/resources/conf/"); - System.setProperty("conf.file", "linkis-cli.properties"); - String configPath = System.getProperty("conf.root"); - String defaultConfFileName = System.getProperty("conf.file"); - List readersList = - new PropsFilesScanner().getPropsReaders(configPath); // +1 user config - /* - user defined config - */ - String userConfPath = null; - if (params.containsParam(AppKeys.LINKIS_CLIENT_USER_CONFIG)) { - userConfPath = - (String) params.getParamItemMap().get(AppKeys.LINKIS_CLIENT_USER_CONFIG).getValue(); - } - if (StringUtils.isNotBlank(userConfPath)) { - PropertiesReader reader = - new PropsFileReader() - .setPropsId(AppKeys.LINKIS_CLIENT_USER_CONFIG) - .setPropsPath(userConfPath); - readersList.add(reader); - } else { - LogUtils.getInformationLogger() - .info("User does not provide usr-configuration 
file. Will use default config"); - } - readersList.add(new SysPropsReader()); - readersList.add(new SysEnvReader()); - PropertiesLoader loader = - new StdPropsLoader() - .addPropertiesReaders(readersList.toArray(new PropertiesReader[readersList.size()])); - ClientProperties[] loaderResult = loader.loadProperties(); - for (ClientProperties properties : loaderResult) { - propertiesMap.put(properties.getPropsId(), properties); - } - - VarAccess stdVarAccess = - new StdVarAccess() - .setCmdParams(params) - .setUserConf(propertiesMap.get(AppKeys.LINKIS_CLIENT_USER_CONFIG)) - .setDefaultConf(propertiesMap.get(AppConstants.DEFAULT_CONFIG_NAME)) - .init(); - - VarAccess sysVarAccess = - new SysVarAccess() - .setSysProp(propertiesMap.get(CommonConstants.SYSTEM_PROPERTIES_IDENTIFIER)) - .setSysEnv(propertiesMap.get(CommonConstants.SYSTEM_ENV_IDENTIFIER)); - - return new ProcessedData(null, params.getCmdType(), stdVarAccess, sysVarAccess); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/execution/LinkisExecutorTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/execution/LinkisExecutorTest.java deleted file mode 100644 index b2571049d3..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/execution/LinkisExecutorTest.java +++ /dev/null @@ -1,113 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.execution; - -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public class LinkisExecutorTest { - - @BeforeEach - public void before() {} - - @AfterEach - public void after() {} - - /** Method: setDriver(LinkisClientDriver driver) */ - @Test - public void testSetDriver() { - // TODO: Test goes here... - } - - /** Method: setDriverTransformer(DriverTransformer driverTransformer) */ - @Test - public void testSetDriverTransformer() { - // TODO: Test goes here... - } - - /** Method: getJobID(JobSubmitExec execData) */ - @Test - public void testGetJobID() { - // TODO: Test goes here... - } - - /** Method: submit(Job job) */ - @Test - public void testDoAsyncSubmit() { - // TODO: Test goes here... - } - - /** Method: updateJobStatus(JobSubmitExec execData) */ - @Test - public void testDoUpdateProgress() { - // TODO: Test goes here... - } - - /** Method: checkSubmit(JobSubmitExec execData) */ - @Test - public void testDoCheckSubmit() { - // TODO: Test goes here... - } - - /** Method: doGetFinalResult(JobSubmitExec execData) */ - @Test - public void testDoGetFinalResult() { - // TODO: Test goes here... - } - - /** Method: checkInit() */ - @Test - public void testCheckInit() { - // TODO: Test goes here... - } - - /** Method: doKill(Job job) */ - @Test - public void testDoKill() { - // TODO: Test goes here... - } - - /** Method: doQuery(Job job) */ - @Test - public void testDoQuery() { - // TODO: Test goes here... 
- } - - /** Method: doTransform(Job job) */ - @Test - public void testDoTransform() { - // TODO: Test goes here... - } - - /** Method: updateExecDataByDwsResult(JobSubmitExec execData, DWSResult result) */ - @Test - public void testUpdateExecDataByDwsResult() { - // TODO: Test goes here... - /* - try { - Method method = LinkisSubmitExecutor.getClass().getMethod("updateExecDataByDwsResult", JobSubmitExec.class, DWSResult.class); - method.setAccessible(true); - method.invoke(, ); - } catch(NoSuchMethodException e) { - } catch(IllegalAccessException e) { - } catch(InvocationTargetException e) { - } - */ - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/LinkisJobBuilderTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/LinkisJobBuilderTest.java deleted file mode 100644 index 99836e8ec6..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/LinkisJobBuilderTest.java +++ /dev/null @@ -1,106 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.job; - -import org.apache.linkis.cli.application.data.ProcessedData; -import org.apache.linkis.cli.application.interactor.command.template.ProcessInputUtil; -import org.apache.linkis.cli.application.interactor.command.template.TestSparkCmdTemplate; -import org.apache.linkis.cli.core.interactor.job.JobBuilder; - -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -public class LinkisJobBuilderTest { - JobBuilder builder; - - @BeforeEach - public void before() { - String[] cmdStr = - new String[] { - "-u", - "hadoop", - "-pwd", - "1234", - "-c", - "src/test/resources/conf/user.properties", - "--cmd", - "show tables", - "--split", - "\',\'", - "--queue", - "q05", - "--name", - "testApp", - // "--hiveconf", "/path/...", - // "--num-executors", "4", - "--executor-cores", - "4", - "--executor-memory", - "4G", - "--shuffle-partitions", - "200", - "-confMap", - "kk=vv,kkk=vvv,spark.executor.instances=8" - }; - ProcessedData data = ProcessInputUtil.generateProcessedData(cmdStr, new TestSparkCmdTemplate()); - // builder = new LinkisJobBuilder() - // .setIdentifier(data.getIdentifier()) - // .setCmdType(data.getCmdType()) - // .setExecutionType(data.getExecutionType()) - // .setSubExecutionType(data.getSubExecutionType()) - // .setStdVarAccess(data.getStdVarAccess()) - // .setSysVarAccess(data.getSysVarAccess()); - } - - @AfterEach - public void after() {} - - /** Method: setIdentifier(String identifier) */ - @Test - public void testSetIdentifier() { - // TODO: Test goes here... - } - - /** Method: build() */ - @Test - public void testBuild() { - // TODO: Test goes here... 
- // LinkisJob job = builder.build(); - // System.out.println(CommonUtils.GSON.toJson(job)); - // assertEquals(job.getParamConfMap().get("spark.executor.instances"), "8"); - } - - /** Method: setStdVarAccess(VarAccess varAccess) */ - @Test - public void testSetStdVarAccess() { - // TODO: Test goes here... - } - - /** Method: setSysVarAccess(VarAccess varAccess) */ - @Test - public void testSetSysVarAccess() { - // TODO: Test goes here... - } - - /** Method: getTargetNewInstance() */ - @Test - public void testGetTargetNewInstance() { - // TODO: Test goes here... - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/subtype/LinkisSubmitSubTypeTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/subtype/LinkisSubmitSubTypeTest.java deleted file mode 100644 index f05d59700e..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/subtype/LinkisSubmitSubTypeTest.java +++ /dev/null @@ -1,33 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.application.interactor.job.subtype; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -public class LinkisSubmitSubTypeTest { - - @Test - @DisplayName("enumTest") - public void enumTest() { - - String submitName = LinkisSubmitSubType.SUBMIT.getName(); - Assertions.assertEquals("submit", submitName); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/Job.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/Job.java deleted file mode 100644 index 5339db3c09..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/Job.java +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.common.entity.job; - -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.command.CmdType; -import org.apache.linkis.cli.common.entity.operator.JobOperator; -import org.apache.linkis.cli.common.entity.present.PresentWay; - -public interface Job { - /** Linkis-cli specified id, not server-side returned job-id/task-id */ - String getCid(); - - /** - * Command Type for this Job, should be able to use this to find out corresponding {@link - * CmdTemplate} - */ - CmdType getCmdType(); - - /** specifies which kind of sub-execution: e.g. jobManagement: status/list/log/kill; */ - JobSubType getSubType(); - - /** - * input-param/config will be stored in JobDescription information contained by this - * data-structure should be passed to server - */ - JobDescription getJobDesc(); - - /** - * data generated during execution(e.g. job status, job id, log, result etc.) is stored here - * information contained by this data-structure can be further passed to server - */ - JobData getJobData(); - - /** operates lower level components(usually encapsulates a client) */ - JobOperator getJobOperator(); - - /** decide how result should be presented */ - PresentWay getPresentWay(); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/result/ResultHandler.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/result/ResultHandler.java deleted file mode 100644 index cf10cb2bf4..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/result/ResultHandler.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.common.entity.result; - -public interface ResultHandler { - void process(ExecutionResult executionResult); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/handler/ExceptionHandler.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/handler/ExceptionHandler.java deleted file mode 100644 index cb5a756ac3..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/handler/ExceptionHandler.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.common.exception.handler; - -public interface ExceptionHandler { - void handle(Exception e); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/test/java/org/apache/linkis/cli/common/exception/error/ErrorLevelTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/test/java/org/apache/linkis/cli/common/exception/error/ErrorLevelTest.java deleted file mode 100644 index e6d14f71ea..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/test/java/org/apache/linkis/cli/common/exception/error/ErrorLevelTest.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.common.exception.error; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -public class ErrorLevelTest { - - @Test - @DisplayName("enumTest") - public void enumTest() { - - int infoLevel = ErrorLevel.INFO.getLevel(); - int warnLevel = ErrorLevel.WARN.getLevel(); - int errorLevel = ErrorLevel.ERROR.getLevel(); - int fatalLevel = ErrorLevel.FATAL.getLevel(); - int retryLevel = ErrorLevel.RETRY.getLevel(); - - Assertions.assertTrue(0 == infoLevel); - Assertions.assertTrue(1 == warnLevel); - Assertions.assertTrue(2 == errorLevel); - Assertions.assertTrue(3 == fatalLevel); - Assertions.assertTrue(4 == retryLevel); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/pom.xml b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/pom.xml deleted file mode 100644 index 4d453144fb..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/pom.xml +++ /dev/null @@ -1,43 +0,0 @@ - - - - 4.0.0 - - - org.apache.linkis - linkis-cli - ${revision} - - linkis-cli-core - jar - - - - org.apache.linkis - linkis-cli-common - ${project.version} - - - - org.reflections - reflections - - - - - diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/builder/AbstractBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/builder/AbstractBuilder.java deleted file mode 100644 index 8063662cb5..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/builder/AbstractBuilder.java +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.builder; - -public abstract class AbstractBuilder implements Builder { - protected T targetObj; - - public AbstractBuilder() { - reset(); - } - - protected void reset() { - targetObj = getTargetNewInstance(); - } - - @Override - public T build() { - T ret = targetObj; - reset(); - return ret; - } - - protected abstract T getTargetNewInstance(); - - // protected T getTargetNewInstance() { - // try { - // ParameterizedType pt = (ParameterizedType) this.getClass().getGenericSuperclass(); - // Class clazz = (Class) pt.getActualTypeArguments()[0]; - // targetObj = clazz.newInstance(); - // } catch (InstantiationException e) { - // throw new BuilderException("BLD0001", ErrorLevel.ERROR, - // CommonErrMsg.BuilderBuildErr, "Cannot generate new instance.", e); - // } catch (IllegalAccessException ie) { - // throw new BuilderException("BLD0001", ErrorLevel.ERROR, - // CommonErrMsg.BuilderBuildErr, "Cannot generate new instance.", ie); - // } - // return targetObj; - // } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/builder/BuildableByVarAccess.java 
b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/builder/BuildableByVarAccess.java deleted file mode 100644 index ca6454e923..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/builder/BuildableByVarAccess.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.builder; - -import org.apache.linkis.cli.common.entity.var.VarAccess; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.BuilderException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; - -import java.lang.reflect.ParameterizedType; - -public abstract class BuildableByVarAccess extends AbstractBuilder { - protected VarAccess stdVarAccess; - protected VarAccess sysVarAccess; - - protected void checkInit() { - if (stdVarAccess == null || sysVarAccess == null) { - ParameterizedType pt = (ParameterizedType) this.getClass().getGenericSuperclass(); - Class clazz = (Class) pt.getActualTypeArguments()[0]; - throw new BuilderException( - "BLD0003", - ErrorLevel.ERROR, - CommonErrMsg.BuilderInitErr, - "Cannot init builder: " - + clazz.getCanonicalName() - + "Cause: stdVarAccess or sysVarAccess is null"); - } - stdVarAccess.checkInit(); - sysVarAccess.checkInit(); - } - - public BuildableByVarAccess setStdVarAccess(VarAccess varAccess) { - this.stdVarAccess = varAccess; - return this; - } - - public BuildableByVarAccess setSysVarAccess(VarAccess varAccess) { - this.sysVarAccess = varAccess; - return this; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/builder/Builder.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/builder/Builder.java deleted file mode 100644 index e66ba2ee55..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/builder/Builder.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.builder; - -public interface Builder { - T build(); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/constants/CommonConstants.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/constants/CommonConstants.java deleted file mode 100644 index 1362539158..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/constants/CommonConstants.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.constants; - -public class CommonConstants { - - public static final Long JOB_QUERY_SLEEP_MILLS = 2000l; - public static final Integer REQUEST_MAX_RETRY_TIME = 3; - - public static final String UNIVERSAL_SUBCMD = "linkis-cli"; - public static final String UNIVERSAL_SUBCMD_DESC = - "command for all types of jobs supported by Linkis"; - - public static final String SUCCESS_INDICATOR = "############Execute Success!!!########"; - public static final String FAILURE_INDICATOR = "############Execute Error!!!########"; - public static final String ARRAY_SEQ = "@#@"; - public static final String ARRAY_SEQ_REGEX = "(?=([^\"]*\"[^\"]*\")*[^\"]*$)"; - public static final int MAX_NUM_OF_COMMAND_ARGUEMENTS = 10; - public static final String CONFIG_DIR = "config.path"; - public static final String[] CONFIG_EXTENSION = {"properties"}; - public static final String SYSTEM_PROPERTIES_IDENTIFIER = "SYS_PROP"; - public static final String SYSTEM_ENV_IDENTIFIER = "SYS_ENV"; -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/constants/CommonKeys.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/constants/CommonKeys.java deleted file mode 100644 index 39eb2f9418..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/constants/CommonKeys.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.constants; - -public class CommonKeys {} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/handler/CommandExceptionHandler.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/handler/CommandExceptionHandler.java deleted file mode 100644 index 25d7a04d20..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/handler/CommandExceptionHandler.java +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.exception.handler; - -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.exception.handler.ExceptionHandler; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.interactor.command.CmdTemplateFactory; -import org.apache.linkis.cli.core.present.HelpInfoPresenter; -import org.apache.linkis.cli.core.present.model.HelpInfoModel; - -public class CommandExceptionHandler implements ExceptionHandler { - @Override - public void handle(Exception e) { - if (e instanceof CommandException) { - if (((CommandException) e).requireHelp()) { - - CmdTemplate template = - CmdTemplateFactory.getTemplateOri(((CommandException) e).getCmdType()); - - if (template != null) { - HelpInfoModel model = new HelpInfoModel(); - model.buildModel(template); - - new HelpInfoPresenter().present(model, null); - } - } - } - new DefaultExceptionHandler().handle(e); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/handler/DefaultExceptionHandler.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/handler/DefaultExceptionHandler.java deleted file mode 100644 index c83e711513..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/handler/DefaultExceptionHandler.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.exception.handler; - -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.handler.ExceptionHandler; -import org.apache.linkis.cli.core.utils.LogUtils; - -import org.apache.commons.lang3.StringUtils; -import org.apache.commons.lang3.exception.ExceptionUtils; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class DefaultExceptionHandler implements ExceptionHandler { - private static Logger logger = LoggerFactory.getLogger(DefaultExceptionHandler.class); - - @Override - public void handle(Exception exception) { - if (exception instanceof LinkisClientRuntimeException) { - LinkisClientRuntimeException e = (LinkisClientRuntimeException) exception; - switch (e.getLevel()) { - case INFO: - logger.info(e.getMessage(), e); - LogUtils.getInformationLogger().info(e.getMessage()); - break; - case WARN: - logger.warn(e.getMessage(), e); - LogUtils.getInformationLogger().warn(getErrInfoWithoutStacktrace(e)); - break; - case ERROR: - logger.error(e.getMessage(), e); - LogUtils.getInformationLogger().error(getErrInfoWithoutStacktrace(e)); - break; - case FATAL: - String msg = StringUtils.substringAfter(e.getMessage(), "[ERROR]"); - logger.error(msg, e); - LogUtils.getInformationLogger().error("[FATAL]" + msg, e); - System.exit(-1); - break; - } - - } else { - 
logger.error(exception.getMessage(), exception); - LogUtils.getInformationLogger().error(exception.getMessage(), exception); - } - } - - private String getErrInfoWithoutStacktrace(Exception e) { - if (e == null) { - return ""; - } - StringBuilder sb = new StringBuilder(); - if (e instanceof NullPointerException) { - sb.append(ExceptionUtils.getStackTrace(e)); - } else { - sb.append(e.getMessage()); - } - if (e.getCause() != null) { - sb.append(System.lineSeparator()) - .append("Caused by: ") - .append((e.getCause().getClass().getCanonicalName())) - .append(": ") - .append(e.getCause().getMessage()); - } - return sb.toString(); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/execution/AsyncSubmission.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/execution/AsyncSubmission.java deleted file mode 100644 index 765c5969d4..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/execution/AsyncSubmission.java +++ /dev/null @@ -1,109 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.execution; - -import org.apache.linkis.cli.common.entity.execution.Execution; -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.result.ExecutionResult; -import org.apache.linkis.cli.common.entity.result.ExecutionStatus; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.constants.CommonConstants; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.job.AsyncBackendJob; -import org.apache.linkis.cli.core.interactor.result.ExecutionResultImpl; -import org.apache.linkis.cli.core.interactor.result.ExecutionStatusEnum; -import org.apache.linkis.cli.core.utils.CommonUtils; - -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Execute job asynchronously TODO: put exception during execution in ExecutionResult and do not - * interrupt execution - */ -public class AsyncSubmission implements Execution { - private static final Logger logger = LoggerFactory.getLogger(AsyncSubmission.class); - - @Override - public ExecutionResult execute(Map jobs) { - ExecutionStatus executionStatus; - Exception exception = null; // TODO - - if (jobs == null || jobs.size() == 0) { - throw new LinkisClientExecutionException( - "EXE0001", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Null or empty Jobs is submitted to current execution"); - } - - if (jobs.size() > 1) { - throw new LinkisClientExecutionException( - "EXE0001", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Multiple Jobs is not Supported by current execution"); - } - - Job job = jobs.get(jobs.keySet().toArray(new String[jobs.size()])[0]); - - if (!(job instanceof AsyncBackendJob)) { - 
throw new LinkisClientExecutionException( - "EXE0001", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Backend for \"" + job.getClass().getCanonicalName() + "\" does not support async"); - } - - if (job.getSubType() == null) { - throw new LinkisClientExecutionException( - "EXE0001", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "SubExecType should not be null"); - } - - try { - ((AsyncBackendJob) job).submit(); - CommonUtils.doSleepQuietly(CommonConstants.JOB_QUERY_SLEEP_MILLS); - ((AsyncBackendJob) job).updateJobStatus(); - if (job.getJobData().getJobStatus().isJobSubmitted()) { - executionStatus = ExecutionStatusEnum.SUCCEED; - } else { - executionStatus = ExecutionStatusEnum.FAILED; - if (job.getJobData().getException() != null) { - exception = job.getJobData().getException(); - } - } - } catch (Exception e) { - exception = e; - executionStatus = ExecutionStatusEnum.FAILED; - logger.warn("Failed to submit job.", e); - } - - return new ExecutionResultImpl(jobs, executionStatus, exception); - } - - @Override - public boolean terminate(Map jobs) { - return true; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/execution/Help.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/execution/Help.java deleted file mode 100644 index b2d34746c6..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/execution/Help.java +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.execution; - -import org.apache.linkis.cli.common.entity.execution.Execution; -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.result.ExecutionResult; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.command.CmdTemplateFactory; -import org.apache.linkis.cli.core.interactor.result.ExecutionResultImpl; -import org.apache.linkis.cli.core.interactor.result.ExecutionStatusEnum; -import org.apache.linkis.cli.core.present.HelpInfoPresenter; -import org.apache.linkis.cli.core.present.model.HelpInfoModel; - -import java.util.Map; - -public class Help implements Execution { - @Override - public ExecutionResult execute(Map jobs) { - - if (jobs.size() > 1) { - throw new LinkisClientExecutionException( - "EXE0001", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Multiple Jobs is not Supported by current execution"); - } - - HelpInfoModel model = new HelpInfoModel(); - Job job = jobs.values().toArray(new Job[jobs.size()])[0]; - - model.buildModel(CmdTemplateFactory.getTemplateOri(job.getCmdType())); - - new HelpInfoPresenter().present(model, null); - return new ExecutionResultImpl(null, 
ExecutionStatusEnum.SUCCEED, null); - } - - @Override - public boolean terminate(Map jobs) { - return true; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/execution/JobManagement.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/execution/JobManagement.java deleted file mode 100644 index c4741bdf68..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/execution/JobManagement.java +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.interactor.execution; - -import org.apache.linkis.cli.common.entity.execution.Execution; -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.result.ExecutionResult; -import org.apache.linkis.cli.common.entity.result.ExecutionStatus; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.job.ManagableBackendJob; -import org.apache.linkis.cli.core.interactor.result.ExecutionResultImpl; -import org.apache.linkis.cli.core.interactor.result.ExecutionStatusEnum; - -import java.util.Map; - -public class JobManagement implements Execution { - @Override - public ExecutionResult execute(Map jobs) { - ExecutionStatus executionStatus; - Exception exception = null; // TODO - - if (jobs == null || jobs.size() == 0) { - throw new LinkisClientExecutionException( - "EXE0001", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Null or empty Jobs is submitted to current execution"); - } - - if (jobs.size() > 1) { - throw new LinkisClientExecutionException( - "EXE0001", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Multiple Jobs is not Supported by current execution"); - } - - Job job = jobs.get(jobs.keySet().toArray(new String[jobs.size()])[0]); - - if (!(job instanceof ManagableBackendJob)) { - throw new LinkisClientExecutionException( - "EXE0001", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Backend for \"" + job.getClass().getCanonicalName() + "\" is not manageable"); - } - - if (job.getSubType() == null) { - throw new LinkisClientExecutionException( - "EXE0001", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "SubExecType should not be null"); - } - - try { - ((ManagableBackendJob) job).doManage(); - if (((ManagableBackendJob) job).isSuccess()) { - executionStatus = 
ExecutionStatusEnum.SUCCEED; - } else { - executionStatus = ExecutionStatusEnum.FAILED; - if (job.getJobData() != null && job.getJobData().getException() != null) { - exception = job.getJobData().getException(); - } - } - - } catch (Exception e) { - exception = e; - executionStatus = ExecutionStatusEnum.FAILED; - } - - return new ExecutionResultImpl(jobs, executionStatus, exception); - } - - @Override - public boolean terminate(Map jobs) { - return true; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/execution/SyncSubmission.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/execution/SyncSubmission.java deleted file mode 100644 index c143e22357..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/execution/SyncSubmission.java +++ /dev/null @@ -1,195 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.interactor.execution; - -import org.apache.linkis.cli.common.entity.execution.Execution; -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.result.ExecutionResult; -import org.apache.linkis.cli.common.entity.result.ExecutionStatus; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.constants.CommonConstants; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.job.*; -import org.apache.linkis.cli.core.interactor.result.ExecutionResultImpl; -import org.apache.linkis.cli.core.interactor.result.ExecutionStatusEnum; -import org.apache.linkis.cli.core.utils.CommonUtils; -import org.apache.linkis.cli.core.utils.LogUtils; - -import org.apache.commons.lang3.exception.ExceptionUtils; - -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * Execute job synchronously. i.e. Client submit job, and wait til job finish, and get result, no - * matter what server behaves. 
- */ -public class SyncSubmission implements Execution { - private static final Logger logger = LoggerFactory.getLogger(SyncSubmission.class); - - @Override - public ExecutionResult execute(Map jobs) { - - ExecutionStatus executionStatus; - Exception exception = null; // TODO - - if (jobs == null || jobs.size() == 0) { - throw new LinkisClientExecutionException( - "EXE0001", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "Null or empty Jobs is submitted to current execution"); - } - - if (jobs.size() > 1) { - throw new LinkisClientExecutionException( - "EXE0001", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "Multiple Jobs is not Supported by current execution"); - } - - Job job = jobs.get(jobs.keySet().toArray(new String[jobs.size()])[0]); - - if (job instanceof SyncBackendJob) { - try { - ((SyncBackendJob) job).submitAndGetResult(); - } catch (Exception e) { - exception = e; - } - } else if (job instanceof AsyncBackendJob) { - try { - ExecWithAsyncBackend(job); - } catch (Exception e) { - exception = e; - // TODO: throw or fail - } - } else { - throw new LinkisClientExecutionException( - "EXE0002", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "Executor Type: \"" + job.getClass().getCanonicalName() + "\" is not Supported"); - } - - if (job.getJobData() != null - && job.getJobData().getJobStatus() != null - && job.getJobData().getJobStatus().isJobSuccess()) { - executionStatus = ExecutionStatusEnum.SUCCEED; - } else if (job.getJobData().getJobStatus() == null - || !job.getJobData().getJobStatus().isJobFinishedState()) { - executionStatus = ExecutionStatusEnum.UNDEFINED; - if (job.getJobData().getException() != null) { - exception = job.getJobData().getException(); - } - } else { - executionStatus = ExecutionStatusEnum.FAILED; - if (job.getJobData().getException() != null) { - exception = job.getJobData().getException(); - } - } - - return new ExecutionResultImpl(jobs, executionStatus, exception); - } - - @Override - public boolean terminate(Map jobs) 
{ - boolean ok = true; - for (Job job : jobs.values()) { - if (job.getJobData() == null || job.getJobData().getJobStatus() == null) { - continue; - } - String jobId = job.getJobData().getJobID() == null ? "NULL" : job.getJobData().getJobID(); - if (job instanceof TerminatableJob) { - try { - ((TerminatableJob) job).terminate(); - } catch (Exception e) { - System.out.println( - "Failed to kill job: jobId=" + jobId + ". " + ExceptionUtils.getStackTrace(e)); - } - if (!job.getJobData().getJobStatus().isJobCancelled() - || !job.getJobData().getJobStatus().isJobFailure()) { - ok = false; - System.out.println( - "Failed to kill job: jobId=" - + jobId - + ", current status: " - + job.getJobData().getJobStatus().toString()); - } else { - System.out.println( - "Successfully killed job: jobId=" - + jobId - + ", current status: " - + job.getJobData().getJobStatus().toString()); - } - } else { - System.out.println("Job \"" + jobId + "\"" + "is not terminatable"); - } - } - return ok; - } - - private void ExecWithAsyncBackend(Job job) { - - if (!(job instanceof AsyncBackendJob)) { - throw new LinkisClientExecutionException( - "EXE0002", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "job is not instance of AsyncBackendJob"); - } - AsyncBackendJob submitJob = (AsyncBackendJob) job; - - submitJob.submit(); - CommonUtils.doSleepQuietly(CommonConstants.JOB_QUERY_SLEEP_MILLS); - - if (!submitJob.getJobData().getJobStatus().isJobSubmitted()) { - throw new LinkisClientExecutionException( - "EXE0005", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionErr, - "Retry exhausted checking job submission. 
Job is probably not submitted"); - } else { - // Output that job is submitted - StringBuilder infoBuilder = new StringBuilder(); - infoBuilder.append("Job is successfully submitted!").append(System.lineSeparator()); - LogUtils.getInformationLogger().info(infoBuilder.toString()); - } - - if (job instanceof LogAccessibleJob) { - /* - Non-blocking, call if back-end supports it - */ - ((LogAccessibleJob) job).startRetrieveLog(); - } - - submitJob.waitJobComplete(); - - if (submitJob.getJobData().getJobStatus().isJobFinishedState()) { - if (job instanceof ResultAccessibleJob) { - /* - Non-blocking, call if back-end supports it - */ - ((ResultAccessibleJob) job).startRetrieveResult(); - } - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/AbstractJob.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/AbstractJob.java deleted file mode 100644 index 0de5e27ce6..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/AbstractJob.java +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.job; - -import org.apache.linkis.cli.common.entity.command.CmdType; -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.job.JobSubType; -import org.apache.linkis.cli.common.entity.operator.JobOperator; -import org.apache.linkis.cli.common.entity.present.PresentWay; - -public abstract class AbstractJob implements Job { - protected String cid; - protected CmdType cmdType; - protected JobSubType subType; - protected JobOperator operator; - private PresentWay presentWay; - - @Override - public String getCid() { - return cid; - } - - public void setCid(String cid) { - this.cid = cid; - } - - @Override - public CmdType getCmdType() { - return cmdType; - } - - public void setCmdType(CmdType cmdType) { - this.cmdType = cmdType; - } - - @Override - public JobSubType getSubType() { - return subType; - } - - public void setSubType(JobSubType subType) { - this.subType = subType; - } - - @Override - public JobOperator getJobOperator() { - return operator; - } - - public void setOperator(JobOperator operator) { - this.operator = operator; - } - - @Override - public PresentWay getPresentWay() { - return presentWay; - } - - public void setPresentWay(PresentWay presentWay) { - this.presentWay = presentWay; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/AsyncBackendJob.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/AsyncBackendJob.java deleted file mode 100644 index 636a361cad..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/AsyncBackendJob.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to the 
Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.job; - -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.job.JobData; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; - -/** - * If backend supports async-submission, i.e. submit job and then return while job is running. Then - * implement this. 
Note that all results return by server should be returned but stored in {@link - * JobData} - */ -public interface AsyncBackendJob extends Job { - - void submit() throws LinkisClientRuntimeException; - - void updateJobStatus() throws LinkisClientRuntimeException; - - void waitJobComplete() throws LinkisClientRuntimeException; - - void terminate() throws LinkisClientRuntimeException; -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/JobBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/JobBuilder.java deleted file mode 100644 index 566fe3b279..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/JobBuilder.java +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.interactor.job; - -import org.apache.linkis.cli.common.entity.command.CmdType; -import org.apache.linkis.cli.common.entity.job.JobData; -import org.apache.linkis.cli.common.entity.job.JobDescription; -import org.apache.linkis.cli.common.entity.job.JobSubType; -import org.apache.linkis.cli.common.entity.operator.JobOperator; -import org.apache.linkis.cli.common.entity.present.PresentWay; -import org.apache.linkis.cli.core.builder.BuildableByVarAccess; - -public abstract class JobBuilder extends BuildableByVarAccess { - - public JobBuilder setCid(String cid) { - targetObj.setCid(cid); - return this; - } - - public JobBuilder setCmdType(CmdType cmdType) { - targetObj.setCmdType(cmdType); - return this; - } - - public JobBuilder setJobSubType(JobSubType subType) { - targetObj.setSubType(subType); - return this; - } - - protected abstract JobDescription buildJobDesc(); - - protected abstract JobData buildJobData(); - - protected abstract JobOperator buildJobOperator(); - - protected abstract PresentWay buildPresentWay(); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/LogAccessibleJob.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/LogAccessibleJob.java deleted file mode 100644 index d372c444a3..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/LogAccessibleJob.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.job; - -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.job.JobData; - -/** If backend supports extracting log while job is running, then implement this interface */ -public interface LogAccessibleJob extends Job { - /** - * Retrieve log. This methods should be non-blocking and log should be appended into {@link - * JobData} - */ - void startRetrieveLog(); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/ManagableBackendJob.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/ManagableBackendJob.java deleted file mode 100644 index 204134110b..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/ManagableBackendJob.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.job; - -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.job.JobStatus; -import org.apache.linkis.cli.common.entity.job.JobSubType; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; - -/** - * Backend support job-management, e.g. get status, get log, kill job etc. Then implement this - * interface. - */ -public interface ManagableBackendJob extends Job { - /** - * since job management can diverge, we decide to assign it to lower-level implementation. - * implementation should use {@link JobSubType} to decide which action to take, hence {@link - * JobSubType} should not be null - */ - void doManage() throws LinkisClientRuntimeException; - - /** - * if execution is success. This can be different from {@link JobStatus} e.g. 
query job status, - * job may be FAIL but execution is a asuccess - */ - boolean isSuccess(); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/TerminatableJob.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/TerminatableJob.java deleted file mode 100644 index ff7e832724..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/TerminatableJob.java +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.interactor.job; - -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; - -/** Backend supports killing job */ -public interface TerminatableJob extends Job { - /** for jobs that starts a new thread */ - TerminateToken getTerminateToken(); - - void terminate() throws LinkisClientRuntimeException; -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/PropertiesLoader.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/PropertiesLoader.java deleted file mode 100644 index df79c8262e..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/PropertiesLoader.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.interactor.properties; - -import org.apache.linkis.cli.common.entity.properties.ClientProperties; -import org.apache.linkis.cli.core.interactor.properties.reader.PropertiesReader; - -public interface PropertiesLoader { - PropertiesLoader setPropertiesReaders(PropertiesReader[] readers); - - PropertiesLoader addPropertiesReader(PropertiesReader reader); - - PropertiesLoader addPropertiesReaders(PropertiesReader[] readers); - - void removePropertiesReader(String identifier); - - ClientProperties[] loadProperties(); - - void checkInit(); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/SysEnvReader.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/SysEnvReader.java deleted file mode 100644 index b89ebe9755..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/SysEnvReader.java +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.interactor.properties.reader; - -import org.apache.linkis.cli.core.constants.CommonConstants; - -import java.util.Properties; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class SysEnvReader implements PropertiesReader { - private static final Logger logger = LoggerFactory.getLogger(SysEnvReader.class); - private String propsId = CommonConstants.SYSTEM_ENV_IDENTIFIER; - private String propsPath = "SYSTEM"; - - @Override - public String getPropsId() { - return propsId; - } - - @Override - public PropertiesReader setPropsId(String identifier) { - this.propsId = identifier; - return this; - } - - @Override - public String getPropsPath() { - return propsPath; - } - - @Override - public PropertiesReader setPropsPath(String propertiesPath) { - this.propsPath = propertiesPath; - return this; - } - - @Override - public Properties getProperties() { - checkInit(); - Properties props = new Properties(); - props.putAll(System.getenv()); - return props; - } - - @Override - public void checkInit() {} -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/SysPropsReader.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/SysPropsReader.java deleted file mode 100644 index bf4bbc0270..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/SysPropsReader.java +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.properties.reader; - -import org.apache.linkis.cli.core.constants.CommonConstants; - -import java.util.Properties; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class SysPropsReader implements PropertiesReader { - private static final Logger logger = LoggerFactory.getLogger(SysPropsReader.class); - private String propsId = CommonConstants.SYSTEM_PROPERTIES_IDENTIFIER; - private String propsPath = "SYSTEM"; - - @Override - public String getPropsId() { - return propsId; - } - - @Override - public PropertiesReader setPropsId(String identifier) { - this.propsId = identifier; - return this; - } - - @Override - public String getPropsPath() { - return propsPath; - } - - @Override - public PropertiesReader setPropsPath(String propertiesPath) { - this.propsPath = propertiesPath; - return this; - } - - @Override - public Properties getProperties() { - checkInit(); - return System.getProperties(); - } - - @Override - public void checkInit() {} -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/DefaultResultHandler.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/DefaultResultHandler.java deleted file mode 100644 index 836aeb7ad8..0000000000 
--- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/DefaultResultHandler.java +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.result; - -import org.apache.linkis.cli.common.entity.result.ExecutionResult; -import org.apache.linkis.cli.common.entity.result.ResultHandler; -import org.apache.linkis.cli.core.constants.CommonConstants; -import org.apache.linkis.cli.core.utils.LogUtils; - -import static java.lang.System.exit; - -/** exit -1 when failure and exit 0 when success */ -public class DefaultResultHandler implements ResultHandler { - @Override - public void process(ExecutionResult executionResult) { - if (executionResult.getExecutionStatus() == ExecutionStatusEnum.SUCCEED) { - LogUtils.getPlaintTextLogger().info(CommonConstants.SUCCESS_INDICATOR); - exit(0); - } else { - LogUtils.getPlaintTextLogger().info(CommonConstants.FAILURE_INDICATOR); - exit(-1); - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/ExecutionResultImpl.java 
b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/ExecutionResultImpl.java deleted file mode 100644 index b145ba9c8b..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/ExecutionResultImpl.java +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.interactor.result; - -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.result.ExecutionResult; -import org.apache.linkis.cli.common.entity.result.ExecutionStatus; - -import java.util.Map; - -public class ExecutionResultImpl implements ExecutionResult { - - Map jobsMap; - ExecutionStatus executionStatus; - Exception exception; - - public ExecutionResultImpl(Map jobsMap, ExecutionStatus executionStatus) { - this.jobsMap = jobsMap; - this.executionStatus = executionStatus; - } - - public ExecutionResultImpl( - Map jobsMap, ExecutionStatus executionStatus, Exception exception) { - this.jobsMap = jobsMap; - this.executionStatus = executionStatus; - this.exception = exception; - } - - @Override - public Map getJobs() { - return jobsMap; - } - - @Override - public ExecutionStatus getExecutionStatus() { - return this.executionStatus; - } - - @Override - public void setExecutionStatus(ExecutionStatus executionStatus) { - this.executionStatus = executionStatus; - } - - @Override - public Exception getException() { - return this.exception; - } - - @Override - public void setException(Exception exception) { - this.exception = exception; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/PresentResultHandler.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/PresentResultHandler.java deleted file mode 100644 index 5a3fbfa4a2..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/PresentResultHandler.java +++ /dev/null @@ -1,78 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.result; - -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.entity.present.Presenter; -import org.apache.linkis.cli.common.entity.result.ExecutionResult; -import org.apache.linkis.cli.common.entity.result.ResultHandler; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; - -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class PresentResultHandler implements ResultHandler { - private static Logger logger = LoggerFactory.getLogger(PresentResultHandler.class); - Presenter presenter; - Model model; - - public void checkInit() { - if (presenter == null || model == null) { - throw new LinkisClientExecutionException( - "EXE0031", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionResultErr, - "Presenter or model is null"); - } - } - - public void setPresenter(Presenter presenter) { - this.presenter = presenter; - } - - public void setModel(Model model) { - this.model = model; - } - - @Override - public void process(ExecutionResult executionResult) { 
- checkInit(); - Map jobs = executionResult.getJobs(); - // Probably need modification if we further want multiple-jobs support - // but we probably don't want to support that - if (jobs != null) { - for (Job job : jobs.values()) { - if (job != null) { - model.buildModel(job.getJobData()); - } - try { - presenter.present(model, job.getPresentWay()); - } catch (Exception e) { - logger.error("Execution failed because exception thrown when presenting data.", e); - executionResult.setExecutionStatus(ExecutionStatusEnum.FAILED); - executionResult.setException(e); - } - } - } - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/var/SysVarAccess.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/var/SysVarAccess.java deleted file mode 100644 index d291e1bda5..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/var/SysVarAccess.java +++ /dev/null @@ -1,114 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.interactor.var; - -import org.apache.linkis.cli.common.entity.properties.ClientProperties; -import org.apache.linkis.cli.common.entity.var.VarAccess; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.VarAccessException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; - -import java.util.ArrayList; -import java.util.List; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class SysVarAccess implements VarAccess { - private static Logger logger = LoggerFactory.getLogger(SysVarAccess.class); - private ClientProperties sysProp; - private ClientProperties sysEnv; - - public SysVarAccess setSysProp(ClientProperties sysProp) { - this.sysProp = sysProp; - return this; - } - - public ClientProperties getSysProp(String identifier) { - return this.sysProp; - } - - public SysVarAccess setSysEnv(ClientProperties sysEnv) { - this.sysEnv = sysEnv; - return this; - } - - public ClientProperties getSysEnv(String identifier) { - return this.sysEnv; - } - - @Override - public void checkInit() { - if (this.sysProp == null && this.sysEnv == null) { - throw new VarAccessException( - "VA0001", - ErrorLevel.ERROR, - CommonErrMsg.VarAccessInitErr, - "sys_prop and sys_env are both null"); - } - } - - @Override - public T getVar(Class clazz, String key) { - checkInit(); - if (clazz != String.class) { - // throw exception - } - Object o1 = sysProp.get(key); - Object o2 = sysEnv.get(key); - if (o1 != null && o2 != null) { - throw new VarAccessException( - "VA0002", - ErrorLevel.WARN, - CommonErrMsg.VarAccessErr, - "same key occurred in sys_prop and sys_env. will use sys_prop"); - } - Object ret = o1 != null ? 
o1 : o2; - return clazz.cast(ret); - } - - @Override - public T getVarOrDefault(Class clazz, String key, T defaultValue) { - T ret = getVar(clazz, key); - if (ret == null) { - ret = defaultValue; - } - return ret; - } - - @Override - public String[] getAllVarKeys() { - List varKeys = new ArrayList<>(); - if (sysProp != null) { - for (Object key : sysProp.keySet()) { - varKeys.add((String) key); - } - } - if (sysEnv != null) { - for (Object key : sysEnv.keySet()) { - varKeys.add((String) key); - } - } - return varKeys.toArray(new String[varKeys.size()]); - } - - @Override - public boolean hasVar(String key) { - return sysEnv.containsKey(key) || sysProp.containsKey(key); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/operator/JobOperatorFactory.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/operator/JobOperatorFactory.java deleted file mode 100644 index dd6031d66a..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/operator/JobOperatorFactory.java +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.operator; - -import org.apache.linkis.cli.common.entity.operator.JobOperator; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; - -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.locks.ReentrantLock; - -public class JobOperatorFactory { - private static Map builderMap = new ConcurrentHashMap<>(); - - private static Map instanceMap = new ConcurrentHashMap<>(); // for singleton - private static Map lockMap = new ConcurrentHashMap<>(); // for singleton - - public static synchronized void register(String name, JobOperatorBuilder builder) - throws Exception { - if (builderMap.containsKey(name) - || lockMap.containsKey(name) - || instanceMap.containsKey(name)) { - throw new LinkisClientExecutionException( - "EXE0027", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Attempting to register a duplicate jobOperator, name: " + name); - } - builderMap.put(name, builder); - lockMap.put(name, new ReentrantLock()); - } - - public static synchronized void remove(String name) { - builderMap.remove(name); - instanceMap.remove(name); - lockMap.remove(name); - } - - public static JobOperator getReusable(String name) throws Exception { - JobOperatorBuilder builder = builderMap.get(name); - ReentrantLock lock = lockMap.get(name); - JobOperator instance = instanceMap.get(name); - if (lock == null || builder == null) { - throw new LinkisClientExecutionException( - "EXE0028", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Failed to get a reusable joboperator, name: " + name); - } - if (instance == null) { - boolean ok = lock.tryLock(500, TimeUnit.MILLISECONDS); - if (!ok) { - throw 
new LinkisClientExecutionException( - "EXE0028", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Failed to get a reusable joboperator, name: " + name); - } - if (instance == null) { - instance = builder.build(); - instanceMap.put(name, instance); - } - lock.unlock(); - } - return instance; - } - - public static JobOperator getNew(String name) throws Exception { - JobOperatorBuilder builder = builderMap.get(name); - if (builder == null) { - throw new Exception("TODO"); // TODO - } - return builder.build(); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/DefaultStdOutPresenter.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/DefaultStdOutPresenter.java deleted file mode 100644 index 7ed2307520..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/DefaultStdOutPresenter.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.present; - -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.entity.present.PresentWay; -import org.apache.linkis.cli.common.entity.present.Presenter; -import org.apache.linkis.cli.core.present.display.DisplayOperator; -import org.apache.linkis.cli.core.present.display.StdOutWriter; -import org.apache.linkis.cli.core.present.display.data.StdoutDisplayData; -import org.apache.linkis.cli.core.utils.CommonUtils; - -public class DefaultStdOutPresenter implements Presenter { - DisplayOperator driver = new StdOutWriter(); - - @Override - public void present(Model model, PresentWay presentWay) { - driver.doOutput(new StdoutDisplayData(CommonUtils.GSON.toJson(model))); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/PresentMode.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/PresentMode.java deleted file mode 100644 index 6d26a2eb49..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/PresentMode.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.present; - -public interface PresentMode { - String getName(); -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/DisplayOperFactory.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/DisplayOperFactory.java deleted file mode 100644 index 9f584f4505..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/DisplayOperFactory.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.present.display; - -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.PresenterException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.present.PresentMode; - -import java.util.Map; -import java.util.concurrent.ConcurrentHashMap; - -public class DisplayOperFactory { - private static final Map operatorMap = new ConcurrentHashMap<>(); - - public static synchronized void register(PresentMode mode, DisplayOperator operator) - throws Exception { - if (operatorMap.containsKey(mode.getName())) { - throw new PresenterException( - "PST0012", - ErrorLevel.ERROR, - CommonErrMsg.PresenterInitErr, - "Attempting to register a duplicate DisplayOperator, name: " + mode.getName()); - } - operatorMap.put(mode.getName(), operator); - } - - public static synchronized void remove(PresentMode mode) { - operatorMap.remove(mode.getName()); - } - - public static DisplayOperator getDisplayOper(PresentMode mode) { - return operatorMap.get(mode.getName()); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/StdOutWriter.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/StdOutWriter.java deleted file mode 100644 index bc91340dbf..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/StdOutWriter.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.present.display; - -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.PresenterException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.present.display.data.DisplayData; -import org.apache.linkis.cli.core.present.display.data.StdoutDisplayData; -import org.apache.linkis.cli.core.utils.LogUtils; - -import org.slf4j.Logger; - -public class StdOutWriter implements DisplayOperator { - @Override - public void doOutput(DisplayData data) { - if (!(data instanceof StdoutDisplayData)) { - throw new PresenterException( - "PST0008", - ErrorLevel.ERROR, - CommonErrMsg.PresentDriverErr, - "input data is not instance of StdoutDisplayData"); - } - String content = ((StdoutDisplayData) data).getContent(); - Logger logger = LogUtils.getPlaintTextLogger(); - logger.info(content); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/data/DisplayData.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/data/DisplayData.java deleted file mode 100644 index 38463b7269..0000000000 --- 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/data/DisplayData.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.present.display.data; - -public interface DisplayData {} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/data/FileDisplayData.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/data/FileDisplayData.java deleted file mode 100644 index 607e05fd86..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/data/FileDisplayData.java +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.present.display.data; - -public class FileDisplayData extends StdoutDisplayData implements DisplayData { - private String pathName; - private String fileName; - private Boolean createNewFile; - - public FileDisplayData(String pathName, String fileName, String content, Boolean createNewFile) { - super(content); - this.pathName = pathName; - this.fileName = fileName; - this.createNewFile = createNewFile; - } - - public String getPathName() { - return pathName; - } - - public String getContent() { - return super.getContent(); - } - - public boolean getCreateNewFile() { - return createNewFile; - } - - public String getFileName() { - return fileName; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/CommonUtils.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/CommonUtils.java deleted file mode 100644 index 8d1a3b8ac9..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/CommonUtils.java +++ /dev/null @@ -1,82 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.utils; - -import org.apache.commons.lang3.StringUtils; - -import java.util.HashMap; -import java.util.Map; - -import com.google.gson.Gson; -import com.google.gson.GsonBuilder; - -public class CommonUtils { - - public static final Gson GSON = - new GsonBuilder().setPrettyPrinting().disableHtmlEscaping().create(); - - public static T castStringToAny(Class clazz, String val) { - if (StringUtils.isBlank(val)) { - return null; - } - T ret = null; - if (clazz == Object.class) { - ret = clazz.cast(val); - } else if (clazz == String.class) { - ret = clazz.cast(val); - } else if (clazz == Integer.class) { - ret = clazz.cast(Integer.parseInt(val)); - } else if (clazz == Double.class) { - ret = clazz.cast(Double.parseDouble(val)); - } else if (clazz == Float.class) { - ret = clazz.cast(Float.parseFloat(val)); - } else if (clazz == Long.class) { - ret = clazz.cast(Long.parseLong(val)); - } else if (clazz == Boolean.class) { - ret = clazz.cast(Boolean.parseBoolean(val)); - } - return ret; - } - - public static void doSleepQuietly(Long sleepMills) { - try { - Thread.sleep(sleepMills); - } catch (Exception ignore) { - // ignored - } - } - - public static Map parseKVStringToMap(String kvStr, String separator) { - if (StringUtils.isBlank(separator)) { - 
separator = ","; - } - if (StringUtils.isBlank(kvStr)) { - return null; - } - Map argsProps = new HashMap<>(); - String[] args = StringUtils.splitByWholeSeparator(kvStr, separator); - for (String arg : args) { - int index = arg.indexOf("="); - if (index != -1) { - argsProps.put(arg.substring(0, index).trim(), arg.substring(index + 1).trim()); - } - } - - return argsProps; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/constants/CommonConstantsTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/constants/CommonConstantsTest.java deleted file mode 100644 index a1fb04f01d..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/constants/CommonConstantsTest.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.constants; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -public class CommonConstantsTest { - - @Test - @DisplayName("constTest") - public void constTest() { - - Long jobQuerySleepMills = CommonConstants.JOB_QUERY_SLEEP_MILLS; - Integer requestMaxRetryTime = CommonConstants.REQUEST_MAX_RETRY_TIME; - String universalSubcmd = CommonConstants.UNIVERSAL_SUBCMD; - String universalSubcmdDesc = CommonConstants.UNIVERSAL_SUBCMD_DESC; - String successIndicator = CommonConstants.SUCCESS_INDICATOR; - String failureIndicator = CommonConstants.FAILURE_INDICATOR; - String arraySeq = CommonConstants.ARRAY_SEQ; - String arraySeqRegex = CommonConstants.ARRAY_SEQ_REGEX; - int maxNumOfCommandArguements = CommonConstants.MAX_NUM_OF_COMMAND_ARGUEMENTS; - String configDir = CommonConstants.CONFIG_DIR; - String[] configExtension = CommonConstants.CONFIG_EXTENSION; - String systemPropertiesIdentifier = CommonConstants.SYSTEM_PROPERTIES_IDENTIFIER; - String systemEnvIdentifier = CommonConstants.SYSTEM_ENV_IDENTIFIER; - - Assertions.assertTrue(2000l == jobQuerySleepMills); - Assertions.assertTrue(3 == requestMaxRetryTime); - Assertions.assertEquals("linkis-cli", universalSubcmd); - Assertions.assertEquals( - "command for all types of jobs supported by Linkis", universalSubcmdDesc); - - Assertions.assertEquals("############Execute Success!!!########", successIndicator); - Assertions.assertEquals("############Execute Error!!!########", failureIndicator); - Assertions.assertEquals("@#@", arraySeq); - Assertions.assertEquals("(?=([^\"]*\"[^\"]*\")*[^\"]*$)", arraySeqRegex); - Assertions.assertTrue(10 == maxNumOfCommandArguements); - Assertions.assertEquals("config.path", configDir); - Assertions.assertTrue(configExtension.length == 1); - Assertions.assertEquals("SYS_PROP", systemPropertiesIdentifier); - Assertions.assertEquals("SYS_ENV", systemEnvIdentifier); - } -} diff 
--git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/constants/TestConstants.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/constants/TestConstants.java deleted file mode 100644 index 307369054b..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/constants/TestConstants.java +++ /dev/null @@ -1,100 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.constants; - -public class TestConstants { - - public static final String PRIMARY = "test.primary"; - public static final String PRIMARY_DESC = " command"; - - // for command name - public static final String SPARK = "spark"; - public static final String HIVE = "hive"; - - // for command description - public static final String SPARK_DESC = "Execute sql with spark"; - public static final String HIVE_DESC = "Execute hive sql"; - public static final String JOB_DESC = ""; - - // Common command params - public static final String PARAM_COMMON_FILE = "param.common.file"; - public static final String PARAM_COMMON_CMD = "param.common.command"; - public static final String PARAM_COMMON_OUTPUT = "param.common.output"; - public static final String PARAM_COMMON_ARGS = "param.common.args"; - public static final String PARAM_COMMON_SPLIT = "param.common.split"; - public static final String PARAM_COMMON_OTHER_KV = - "param.common.other.kv"; // for customize some parameters for some commands. Has to be - // in the for of kv-pairs separated by ',' - public static final String PARAM_YARN_QUEUE = "param.yarn.queue"; - - // for job command - public static final String PARAM_JOB_TYPE = "param.job.type"; - public static final String PARAM_JOB_ID = "param.job.id"; - public static final String PARAM_FORCE_KILL = "param.force.kill"; - public static final String LINKIS_JOBID_PREFIX = "lks_"; - - public static final String PARAM_MAPPER_KV_STRING = - "param.mapper.kv.string"; // Rules for ParamKeyMapper. format: - // key1=value1,key2=value2... 
- public static final String YARN_QUEUE = "wds.linkis.yarnqueue"; - public static final String YARN_QUEUE_DEFAULT = "default"; - public static final String YARN_QUEUE_CORE_MAX = "wds.linkis.yarnqueue.cores.max"; - public static final int YARN_QUEUE_CORE_MAX_DEFAULT = 150; - public static final String YARN_QUEUE_MEM_MAX = "wds.linkis.yarnqueue.memory.max"; - public static final String YARN_QUEUE_MEM_MAX_DEFAULT = "300G"; - public static final String PREHEATING_TIME = "wds.linkis.preheating.time"; - public static final String PREHEATING_TIME_DEFAULT = "9:00"; - public static final String TMPFILE_CLEAN_TIME = "wds.linkis.tmpfile.clean.time"; - public static final String TMPFILE_CLEAN_TIME_DEFAULT = "10:00"; - public static final String LINKIS_INSTANCE = "wds.linkis.instance"; - public static final int LINKIS_INSTANCE_DEFAULT = 10; - public static final String LINKIS_CLIENT_MEMORY_MAX = "wds.linkis.client.memory.max"; - public static final String LINKIS_CLIENT_MEMORY_MAX_DEFAULT = "20G"; - - // Common - public static final String LINKIS_NULL_VALUE = ""; - - public static final String SPARK_CMD = "spark"; - - public static final String PARAM_SPARK_NAME = "param.spark.name"; - public static final String PARAM_SPARK_HIVECONF = "param.spark.hiveconf"; - public static final String PARAM_SPARK_NUM_EXECUTORS = "param.spark.num.executors"; - public static final String PARAM_SPARK_EXECUTOR_CORES = "param.spark.executor.cores"; - public static final String PARAM_SPARK_EXECUTOR_MEMORY = "param.spark.executor.memory"; - public static final String PARAM_SPARK_SHUFFLE_PARTITIONS = "param.spark.shuffle.partitions"; - public static final String PARAM_SPARK_RUNTYPE = "param.spark.runtype"; - - public static final String LINKIS_SPARK_NAME = "appName"; - public static final String LINKIS_SPARK_NUM_EXECUTORS = "spark.executor.instances"; - public static final String LINKIS_SPARK_EXECUTOR_CORES = "spark.executor.cores"; - public static final String LINKIS_SPARK_EXECUTOR_MEMORY = 
"spark.executor.memory"; - public static final String LINKIS_SPARK_SHUFFLE_PARTITIONS = "spark.sql.shuffle.partitions"; - - public static final String PARAM_DB = "param.primary.database"; - public static final String PARAM_PROXY = "param.primary.proxy"; - public static final String PARAM_USER = "param.primary.user"; - public static final String PARAM_USR_CONF = "param.primary.user.conf"; - public static final String PARAM_PASSWORD = "param.primary.password"; - public static final String PARAM_SYNC_KEY = "param.primary.synckey"; - public static final String PARAM_PROXY_USER = "param.primary.proxyUser"; - public static final String PARAM_HELP = "param.help"; - public static final String PARAM_REAL_NAME = "param.primary.realName"; - public static final String PARAM_PIN_TOKEN = "param.primary.pinToken"; - - public static final String PARAM_PROPERTIES = "params.properties"; -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/exception/error/CommonErrMsgTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/exception/error/CommonErrMsgTest.java deleted file mode 100644 index cdd51f4aea..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/exception/error/CommonErrMsgTest.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.exception.error; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -public class CommonErrMsgTest { - - @Test - @DisplayName("enumTest") - public void enumTest() { - - String templateGenerr = CommonErrMsg.TemplateGenErr.getMsgTemplate(); - String fitErrMsgTemplate = CommonErrMsg.TemplateFitErr.getMsgTemplate(); - String parserInitErrMsgTemplate = CommonErrMsg.ParserInitErr.getMsgTemplate(); - String parseErrMsgTemplate = CommonErrMsg.ParserParseErr.getMsgTemplate(); - - Assertions.assertEquals("Cannot generate template. :{0}", templateGenerr); - Assertions.assertEquals("Cannot fit input into template: {0}", fitErrMsgTemplate); - - Assertions.assertEquals("Failed to init parser: {0}", parserInitErrMsgTemplate); - Assertions.assertEquals("Failed to parse. 
{0}", parseErrMsgTemplate); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/exception/handler/CommandExceptionHandlerTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/exception/handler/CommandExceptionHandlerTest.java deleted file mode 100644 index ee23f0216b..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/exception/handler/CommandExceptionHandlerTest.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.exception.handler; - -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.common.exception.handler.ExceptionHandler; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.command.TestCmdType; - -import org.junit.jupiter.api.Disabled; -import org.junit.jupiter.api.Test; - -import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; - -public class CommandExceptionHandlerTest { - ExceptionHandler handler = new CommandExceptionHandler(); - - // todo - @Disabled - @Test - public void handle() throws Exception { - CommandException cmdException = - new CommandException( - "CODE-001", - ErrorLevel.ERROR, - CommonErrMsg.TemplateGenErr, - "Failed to generate template."); - assertDoesNotThrow(() -> handler.handle(cmdException)); - - String[] params = {"param1", "param2"}; - CommandException cmdException2 = - new CommandException( - "CODE-001", - ErrorLevel.ERROR, - CommonErrMsg.TemplateGenErr, - params, - "Failed to generate template."); - assertDoesNotThrow(() -> handler.handle(cmdException2)); - - CommandException cmdException3 = - new CommandException( - "CODE-001", - ErrorLevel.ERROR, - CommonErrMsg.TemplateGenErr, - TestCmdType.PRIMARY, - "Failed to generate template."); - assertDoesNotThrow(() -> handler.handle(cmdException3)); - - CommandException cmdException4 = - new CommandException( - "CODE-001", - ErrorLevel.ERROR, - CommonErrMsg.TemplateGenErr, - TestCmdType.PRIMARY, - params, - "Failed to generate template."); - assertDoesNotThrow(() -> handler.handle(cmdException4)); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/TestCmdType.java 
b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/TestCmdType.java deleted file mode 100644 index a6b05eef2b..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/TestCmdType.java +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.interactor.command; - -import org.apache.linkis.cli.common.entity.command.CmdType; -import org.apache.linkis.cli.core.constants.TestConstants; - -public enum TestCmdType implements CmdType { - PRIMARY(TestConstants.PRIMARY, 0, TestConstants.PRIMARY_DESC), - SPARK(TestConstants.SPARK, 1, TestConstants.SPARK_DESC); - // TEST(TestConstants.TEST_COMMAND, 0, TestConstants.TEST_DESC); - - private int id; - private String name; - private String desc; - - TestCmdType(String name, int id) { - this.id = id; - this.name = name; - this.desc = null; - } - - TestCmdType(String name, int id, String desc) { - this.id = id; - this.name = name; - this.desc = desc; - } - - @Override - public int getId() { - return this.id; - } - - @Override - public String getName() { - return this.name; - } - - @Override - public String getDesc() { - return this.desc; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/fitter/FitterUtilsTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/fitter/FitterUtilsTest.java deleted file mode 100644 index 5c51a582fa..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/fitter/FitterUtilsTest.java +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.command.fitter; - -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; - -public class FitterUtilsTest { - - @Test - @DisplayName("isOptionTest") - public void isOptionTest() { - - String name = "-hadoop"; - boolean option = FitterUtils.isOption(name); - Assertions.assertTrue(option); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/fitter/SingleTplFitterTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/fitter/SingleTplFitterTest.java deleted file mode 100644 index ef542910dd..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/fitter/SingleTplFitterTest.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.command.fitter; - -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.core.interactor.command.template.TestSparkCmdTemplate; - -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -import static org.junit.jupiter.api.Assertions.*; - -public class SingleTplFitterTest { - Fitter fitter; - CmdTemplate template; - String[] cmdStr, cmdStr2; - - @BeforeEach - public void before() throws Exception { - cmdStr = - new String[] { - "-u", - "hadoop", - "-pwd", - "1234", - "-c", - "/path/to/user/config", - "--cmd", - "show tables", - "--split", - "\',\'", - "--queue", - "q05", - "--name", - "testApp", - // "--hiveconf", "/path/...", - "--num-executors", - "4", - "--executor-cores", - "4", - "--executor-memory", - "4G", - "--shuffle-partitions", - "200", - "--other", - "--other-spark-config=none", - }; - cmdStr2 = - new String[] { - "-u", - "hadoop", - "-pwd", - "1234", - "-c", - "/path/to/user/config", - "--cmd", - "show tables", - "--split", - "\',\'", - "--queue", - "q05", - "--name", - "testApp", - // "--hiveconf", "/path/...", - "--num-executors", - "4", - "--executor-cores", - "4", - "--executor-memory", - "4G", - "--shuffle-partitions", - "200", - "--other", - "--other-spark-config=none", - "-P", - "key1=value1, key2=value2, key5=\"key3=value3,key4=value4\" " - }; - template = new TestSparkCmdTemplate(); - fitter = new SingleTplFitter(); - } - - @AfterEach - public void after() throws Exception 
{} - - /** Method: fit(TemplateFitterInput[] inputs) */ - @Test - public void testParseAndFit() throws Exception { - FitterResult[] results = new FitterResult[2]; - results[0] = fitter.fit(cmdStr, template); - results[1] = fitter.fit(cmdStr2, new TestSparkCmdTemplate()); - - assertTrue(results[0].getParsedTemplate() instanceof TestSparkCmdTemplate); - assertEquals( - results[0].getParsedTemplate().getOptionsMap().get("--cmd").getValue(), "show tables"); - assertNotEquals(results[0].getParsedTemplate(), template.getCopy()); - assertNotEquals(results[0].getParsedTemplate().getOptions(), template.getCopy().getOptions()); - assertNotEquals( - results[0].getParsedTemplate().getOptions().get(1), template.getCopy().getOptions().get(1)); - assertEquals( - results[0].getParsedTemplate().getOptions().get(1).getValue(), - template.getCopy().getOptions().get(1).getValue()); - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/parser/SingleCmdParserTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/parser/SingleCmdParserTest.java deleted file mode 100644 index 61b8c72eab..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/parser/SingleCmdParserTest.java +++ /dev/null @@ -1,177 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.command.parser; - -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.command.ParamItem; -import org.apache.linkis.cli.core.interactor.command.TestCmdType; -import org.apache.linkis.cli.core.interactor.command.fitter.SingleTplFitter; -import org.apache.linkis.cli.core.interactor.command.parser.result.ParseResult; -import org.apache.linkis.cli.core.interactor.command.template.TestParamMapper; -import org.apache.linkis.cli.core.interactor.command.template.TestSparkCmdTemplate; - -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -import static org.junit.jupiter.api.Assertions.*; - -public class SingleCmdParserTest { - String[] cmdStr; - Map templateMap; - - @BeforeEach - public void before() throws Exception { - cmdStr = - new String[] { - "-u", - "hadoop", - "-pwd", - "1234", - "-c", - "/path/to/user/config", - "--cmd", - "show tables", - "--split", - "\',\'", - "--queue", - "q05", - "--name", - "testApp", - // "--hiveconf", "/path/...", - "--num-executors", - "4", - "--executor-cores", - "4", - "--executor-memory", - "4G", - "--shuffle-partitions", - "200", - "--other", - "--other-spark-config=none", - }; - - TestSparkCmdTemplate template = new TestSparkCmdTemplate(); - templateMap = new HashMap<>(); - 
templateMap.put(template.getCmdType().getName(), template); - } - - @AfterEach - public void after() throws Exception {} - - /** Method: parse(String[] input) */ - @Test - public void testParse() { - - Parser parser = - new SingleCmdParser() - .setMapper(null) - .setTemplate(templateMap.get(TestCmdType.SPARK.getName())) - .setFitter(new SingleTplFitter()) - .setMapper(new TestParamMapper()); - - ParseResult result = parser.parse(cmdStr); - - assertEquals(result.getParams().getCmdType(), TestCmdType.SPARK); - - Map params = result.getParams().getParamItemMap(); - - assertEquals(params.size(), 21); - - List sortedKeys = params.keySet().stream().sorted().collect(Collectors.toList()); - assertEquals( - sortedKeys, - Arrays.asList( - "converted.args", - "converted.split", - "param.common.command", - "param.common.file", - "param.common.other.kv", - "param.help", - "param.primary.database", - "param.primary.password", - "param.primary.proxy", - "param.primary.proxyUser", - "param.primary.synckey", - "param.primary.user", - "param.primary.user.conf", - "param.spark.executor.cores", - "param.spark.executor.memory", - "param.spark.hiveconf", - "param.spark.name", - "param.spark.num.executors", - "param.spark.runtype", - "param.spark.shuffle.partitions", - "param.yarn.queue")); - assertEquals(result.getRemains().length, 0); - } - - /** Method: parsePrimary(String[] input) */ - @Test - public void testParsePrimary() throws Exception { - // TODO: Test goes here... - /* - try { - Method method = SingleCmdParser.getClass().getMethod("parsePrimary", String[].class); - method.setAccessible(true); - method.invoke(, ); - } catch(NoSuchMethodException e) { - } catch(IllegalAccessException e) { - } catch(InvocationTargetException e) { - } - */ - } - - /** Method: parseSingleSub(String[] remains) */ - @Test - public void testParseSingleSub() throws Exception { - // TODO: Test goes here... 
- /* - try { - Method method = SingleCmdParser.getClass().getMethod("parseSingleSub", String[].class); - method.setAccessible(true); - method.invoke(, ); - } catch(NoSuchMethodException e) { - } catch(IllegalAccessException e) { - } catch(InvocationTargetException e) { - } - */ - } - - /** Method: standardParse(String identifier, String[] args, CmdTemplate templateOri) */ - @Test - public void testStandardParse() throws Exception { - // TODO: Test goes here... - /* - try { - Method method = SingleCmdParser.getClass().getMethod("standardParse", String.class, String[].class, CmdTemplate.class); - method.setAccessible(true); - method.invoke(, ); - } catch(NoSuchMethodException e) { - } catch(IllegalAccessException e) { - } catch(InvocationTargetException e) { - } - */ - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/template/TestParamMapper.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/template/TestParamMapper.java deleted file mode 100644 index 25f44d56c7..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/template/TestParamMapper.java +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.command.template; - -import org.apache.linkis.cli.core.constants.TestConstants; -import org.apache.linkis.cli.core.interactor.command.parser.transformer.ParamKeyMapper; - -public class TestParamMapper extends ParamKeyMapper { - - @Override - public void initMapperRules() { - super.updateMapping(TestConstants.SPARK, "converted.cmd"); - super.updateMapping(TestConstants.PARAM_COMMON_ARGS, "converted.args"); - super.updateMapping(TestConstants.PARAM_COMMON_SPLIT, "converted.split"); - // super.updateMapping("key1", "spark.cmd"); //should throw exception - // super.updateMapping("TestConstants.PARAM_SPARK_CMD", "spark.cmd"); - // super.updateMapping("TestConstants.PARAM_SPARK_CMD", "spark.cmd"); - - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/template/TestSparkCmdTemplate.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/template/TestSparkCmdTemplate.java deleted file mode 100644 index 4795f11c9c..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/command/template/TestSparkCmdTemplate.java +++ /dev/null @@ -1,268 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.command.template; - -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.core.constants.TestConstants; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.interactor.command.TestCmdType; -import org.apache.linkis.cli.core.interactor.command.template.option.StdOption; - -import org.apache.commons.lang3.StringUtils; - -import java.util.HashMap; -import java.util.Map; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -public class TestSparkCmdTemplate extends AbstractCmdTemplate { - protected StdOption databaseOp = - option( - null, - TestConstants.PARAM_DB, - new String[] {"-d", "--database"}, - "specify database", - true, - ""); - protected StdOption proxyOp = - option( - null, - TestConstants.PARAM_PROXY, - new String[] {"-x", "--proxy"}, - "specify proxy url", - true, - ""); - protected StdOption userOp = - option( - null, TestConstants.PARAM_USER, new String[] {"-u", "--user"}, "specify user", true, ""); - protected StdOption confOp = - option( - null, - TestConstants.PARAM_USR_CONF, - new String[] {"-c", "--conf"}, - "specify configuration from property file", - true, - ""); - private Logger logger = 
LoggerFactory.getLogger(TestSparkCmdTemplate.class); - private StdOption passwordOp = - option( - null, - TestConstants.PARAM_PASSWORD, - new String[] {"-pwd", "--passwd"}, - "specify user password", - true, - ""); - private StdOption syncOp = - option( - null, - TestConstants.PARAM_SYNC_KEY, - new String[] {"-sk", "--synckey"}, - "specify sync key", - true, - ""); - private StdOption proxyUserOp = - option( - null, - TestConstants.PARAM_PROXY_USER, - new String[] {"-pu", "--proxy-user"}, - "specify proxy user", - true, - ""); - - private StdOption helpOp = - option(null, TestConstants.PARAM_HELP, new String[] {"-h", "--help"}, "help info", true, ""); - - private StdOption filePara = - option( - null, - TestConstants.PARAM_COMMON_FILE, - new String[] {"--file", "-f"}, - "Spark SQL File to Execute!", - true, - ""); - - private StdOption commandPara = - option( - null, - TestConstants.PARAM_COMMON_CMD, - new String[] {"--cmd"}, - "Spark SQL Command to Execute!", - true, - ""); - - private StdOption argsPara = - option( - null, - TestConstants.PARAM_COMMON_ARGS, - new String[] {"--args", "-a"}, - "Set command args, k-v pairs delimited by comma, e.g. key1=value1,key2=value2,...", - true, - ""); - - private StdOption splitPara = - option( - null, - TestConstants.PARAM_COMMON_SPLIT, - new String[] {"--split", "-s"}, - "specify the split character string", - true, - ","); - - private StdOption queuePara = - option( - null, - TestConstants.PARAM_YARN_QUEUE, - new String[] {"--queue", "-q"}, - "specify the queue", - true, - "default"); - - private StdOption namePara = - option( - null, - TestConstants.PARAM_SPARK_NAME, - new String[] {"--name", "-n"}, - "specify the application name. WARNING:this option is deprecated. Linkis does not support this variable", - true, - ""); - - private StdOption> hiveconfPara = - option( - null, - TestConstants.PARAM_SPARK_HIVECONF, - new String[] {"--hiveconf", "-hc"}, - "specify the hiveconf setting,e.g. 
hive.cli.print.header=false", - true, - new HashMap<>()); - - private StdOption nePara = - option( - null, - TestConstants.PARAM_SPARK_NUM_EXECUTORS, - new String[] {"--num-executors", "-ne"}, - "specify the spark application container", - true, - 3); - - private StdOption ecPara = - option( - null, - TestConstants.PARAM_SPARK_EXECUTOR_CORES, - new String[] {"--executor-cores", "-ec"}, - "specify the spark application container vcores(less than queue's max vcores)", - true, - 2); - - private StdOption emPara = - option( - null, - TestConstants.PARAM_SPARK_EXECUTOR_MEMORY, - new String[] {"--executor-memory", "-em"}, - "specify the spark application executor's memory, 1.5G-2G/vcore", - true, - "4G"); - - private StdOption spPara = - option( - null, - TestConstants.PARAM_SPARK_SHUFFLE_PARTITIONS, - new String[] {"--shuffle-partitions", "-sp"}, - "specify the spark.sql.shuffle.partitions", - true, - 200); - - private StdOption otherPara = - option( - null, - TestConstants.PARAM_COMMON_OTHER_KV, - new String[] {"--other"}, - "specify the other parameters", - true, - ""); - - private StdOption runTypePara = - option( - null, - TestConstants.PARAM_SPARK_RUNTYPE, - new String[] {"--runtype"}, - "specify the runtype parameters: sql pyspark scala", - true, - "sql"); - - public TestSparkCmdTemplate() { - super(TestCmdType.SPARK); - } - - @Override - public void checkParams() throws CommandException {} - - @Override - protected Object clone() throws CloneNotSupportedException { - return super.clone(); - } - - @Override - public TestSparkCmdTemplate getCopy() { - return (TestSparkCmdTemplate) super.getCopy(); - } - - public void prepare() throws LinkisClientRuntimeException {} - - private Map parseOtherMap(String otherStr) { - Map otherMap = new HashMap<>(); - /** - * split by space-chara, but if space char is within \"\", then do not split. e.g. 
"a b c" will - * not be splited - */ - otherStr = otherStr.replace("\'", "\""); - String[] arr = otherStr.trim().split("\\s(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)", -1); - for (int i = 0; i < arr.length; i++) { - String candidate = arr[i]; - - if (StringUtils.equalsIgnoreCase(candidate, "--conf") && i < arr.length - 1) { - // '--conf' and has next - i++; // move to next - candidate = arr[i]; - int idx = StringUtils.indexOf(candidate, "="); - if (idx != -1) { - String key = StringUtils.substring(candidate, 0, idx).trim(); - String value = StringUtils.substring(candidate, idx + 1).trim(); - otherMap.put(key, value); - } - } else { - // without '--conf' - int idx = StringUtils.indexOf(candidate, " "); - if (idx != -1) { - // e.g. '--driver-memory 2G' - String key = StringUtils.substring(candidate, 0, idx).trim(); - String value = StringUtils.substring(candidate, idx + 1).trim(); - otherMap.put(key, value); - } else if (i < arr.length - 1) { - // e.g. --driver-memory 2G - String key = candidate.trim(); - i++; // move to next - candidate = arr[i]; - String value = candidate.trim(); - otherMap.put(key, value); - } - } - } - return otherMap; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/properties/StdPropsLoaderTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/properties/StdPropsLoaderTest.java deleted file mode 100644 index 9bef89b3f3..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/properties/StdPropsLoaderTest.java +++ /dev/null @@ -1,119 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.cli.core.interactor.properties; - -import org.apache.linkis.cli.common.entity.properties.ClientProperties; -import org.apache.linkis.cli.core.interactor.properties.reader.PropertiesReader; - -import java.util.Arrays; -import java.util.List; -import java.util.stream.Collectors; - -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -import static org.junit.jupiter.api.Assertions.assertEquals; - -public class StdPropsLoaderTest { - PropertiesLoader loader; - - @BeforeEach - public void before() throws Exception { - System.setProperty("conf.root", "src/test/resources/conf/"); - System.setProperty("conf.file", "linkis-cli.properties"); - String configPath = System.getProperty("conf.root"); - String defaultConfFileName = System.getProperty("conf.file"); - /* - default config, -Dconf.root & -Dconf.file specifies config path - */ - List readersList = - new PropsFilesScanner().getPropsReaders(configPath); // +1 user config - /* - user defined config - */ - // load all config files - loader = - new StdPropsLoader() - .addPropertiesReaders(readersList.toArray(new PropertiesReader[readersList.size()])); - } - - @AfterEach - public void after() throws Exception {} - - /** Method: setPropertiesReaders(PropertiesReader[] readers) */ - @Test - public void testSetPropertiesReaders() throws Exception { - 
// TODO: Test goes here... - loader.setPropertiesReaders(new PropertiesReader[0]); - } - - /** Method: getAllReadersAsMap() */ - @Test - public void testGetAllReadersAsMap() throws Exception { - // TODO: Test goes here... - } - - /** Method: addPropertiesReader(PropertiesReader reader) */ - @Test - public void testAddPropertiesReader() throws Exception { - // TODO: Test goes here... - } - - /** Method: addPropertiesReaders(PropertiesReader[] readers) */ - @Test - public void testAddPropertiesReaders() throws Exception { - // TODO: Test goes here... - } - - /** Method: getPropertiesReader(String identifier) */ - @Test - public void testGetPropertiesReader() throws Exception { - // TODO: Test goes here... - } - - /** Method: removePropertiesReader(String identifier) */ - @Test - public void testRemovePropertiesReader() throws Exception { - // TODO: Test goes here... - } - - /** Method: loadProperties() */ - @Test - public void testLoadProperties() throws Exception { - ClientProperties[] loaderResult = loader.loadProperties(); - - List properties = - Arrays.stream(loaderResult) - .sorted((p1, p2) -> p1.size() - p2.size()) - .collect(Collectors.toList()); - - assertEquals(2, properties.size()); - assertEquals(properties.get(0).getPropsId(), "user.properties"); - assertEquals(properties.get(0).size(), 4); - - assertEquals(properties.get(1).getPropsId(), "linkis-cli.properties"); - assertEquals(properties.get(1).size(), 8); - } - - /** Method: checkInit() */ - @Test - public void testCheckInit() throws Exception { - // TODO: Test goes here... 
- } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/var/StdVarAccessTest.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/var/StdVarAccessTest.java deleted file mode 100644 index 497a539dd5..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/interactor/var/StdVarAccessTest.java +++ /dev/null @@ -1,298 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.interactor.var; - -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.properties.ClientProperties; -import org.apache.linkis.cli.common.entity.var.VarAccess; -import org.apache.linkis.cli.core.constants.TestConstants; -import org.apache.linkis.cli.core.interactor.command.TestCmdType; -import org.apache.linkis.cli.core.interactor.command.fitter.SingleTplFitter; -import org.apache.linkis.cli.core.interactor.command.parser.Parser; -import org.apache.linkis.cli.core.interactor.command.parser.SingleCmdParser; -import org.apache.linkis.cli.core.interactor.command.parser.result.ParseResult; -import org.apache.linkis.cli.core.interactor.command.template.TestSparkCmdTemplate; -import org.apache.linkis.cli.core.interactor.properties.PropertiesLoader; -import org.apache.linkis.cli.core.interactor.properties.PropsFilesScanner; -import org.apache.linkis.cli.core.interactor.properties.StdPropsLoader; -import org.apache.linkis.cli.core.interactor.properties.reader.PropertiesReader; -import org.apache.linkis.cli.core.interactor.properties.reader.PropsFileReader; -import org.apache.linkis.cli.core.interactor.validate.ParsedTplValidator; - -import org.apache.commons.lang3.StringUtils; - -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.junit.jupiter.api.AfterEach; -import org.junit.jupiter.api.BeforeEach; -import org.junit.jupiter.api.Test; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; - -public class StdVarAccessTest { - String[] cmdStr; - VarAccess stdVarAccess; - - @BeforeEach - public void before() throws Exception { - cmdStr = - new String[] { - "-u", - "hadoop", - "-pwd", - "1234", - "-c", - "src/test/resources/conf/user.properties", - "--cmd", - "show tables", - "--split", - "\',\'", - "--queue", - "q05", - "--name", - "testApp", - // 
"--hiveconf", "/path/...", - "--num-executors", - "4", - "--executor-cores", - "4", - "--executor-memory", - "4G", - "--shuffle-partitions", - "200", - "--other", - "--other-spark-config=none", - }; - - TestSparkCmdTemplate template = new TestSparkCmdTemplate(); - Map templateMap = new HashMap<>(); - templateMap.put(template.getCmdType().getName(), template); - - Parser parser = - new SingleCmdParser() - .setMapper(null) - .setTemplate(templateMap.get(TestCmdType.SPARK.getName())) - .setFitter(new SingleTplFitter()); - - ParseResult result = parser.parse(cmdStr); - - ParsedTplValidator parsedTplValidator = new ParsedTplValidator(); - parsedTplValidator.doValidation(result.getParsedTemplate()); - - System.setProperty("conf.root", "src/test/resources/conf/"); - System.setProperty("conf.file", "linkis-cli.properties"); - String configPath = System.getProperty("conf.root"); - String defaultConfFileName = System.getProperty("conf.file"); - /* - default config, -Dconf.root & -Dconf.file specifies config path - */ - List readersList = - new PropsFilesScanner().getPropsReaders(configPath); // +1 user config - /* - user defined config - */ - String userConfPath = - (String) result.getParams().getParamItemMap().get(TestConstants.PARAM_USR_CONF).getValue(); - if (StringUtils.isNotBlank(userConfPath)) { - PropertiesReader reader = new PropsFileReader(); - reader.setPropsId("user.conf"); - reader.setPropsPath(userConfPath); - readersList.add(reader); - } else { - } - // load all config files - PropertiesLoader loader = - new StdPropsLoader() - .addPropertiesReaders(readersList.toArray(new PropertiesReader[readersList.size()])); - ClientProperties[] loaderResult = loader.loadProperties(); - Map propertiesMap = new HashMap<>(); - for (ClientProperties properties : loaderResult) { - propertiesMap.put(properties.getPropsId(), properties); - } - - stdVarAccess = - new StdVarAccess() - .setCmdParams(result.getParams()) - .setUserConf(propertiesMap.get("user.conf")) - 
.setDefaultConf(propertiesMap.get(defaultConfFileName)) - .init(); - } - - @AfterEach - public void after() throws Exception {} - - /** Method: setPrimaryParam(Params primaryParam) */ - @Test - public void testSetPrimaryParam() throws Exception { - // TODO: Test goes here... - } - - /** Method: getPrimaryParam(String identifier) */ - @Test - public void testGetPrimaryParam() throws Exception { - // TODO: Test goes here... - } - - /** Method: setCmdParams(Params subParam) */ - @Test - public void testSetSubParam() throws Exception { - // TODO: Test goes here... - } - - /** Method: getSubParam(String identifier) */ - @Test - public void testGetSubParam() throws Exception { - // TODO: Test goes here... - } - - /** Method: setUserConf(ClientProperties userConf) */ - @Test - public void testSetUserConf() throws Exception { - // TODO: Test goes here... - } - - /** Method: getUserConf(String identifier) */ - @Test - public void testGetUserConf() throws Exception { - // TODO: Test goes here... - } - - /** Method: setDefaultConf(ClientProperties defaultConf) */ - @Test - public void testSetDefaultConf() throws Exception { - // TODO: Test goes here... - } - - /** Method: getDefaultConf(String identifier) */ - @Test - public void testGetDefaultConf() throws Exception { - // TODO: Test goes here... - } - - /** Method: checkInit() */ - @Test - public void testCheckInit() throws Exception { - // TODO: Test goes here... 
- } - - /** Method: getVar(Class clazz, String key) */ - @Test - public void testGetVar() throws Exception { - System.out.println(stdVarAccess.getVar(String.class, TestConstants.PARAM_COMMON_CMD)); - assertEquals(stdVarAccess.getVar(String.class, TestConstants.PARAM_COMMON_CMD), "show tables"); - System.out.println(stdVarAccess.getVar(String.class, "user.props")); - assertEquals(stdVarAccess.getVar(String.class, "wds.linkis.client.not.exist"), null); - System.out.println( - stdVarAccess.getVar( - Integer.class, TestConstants.PARAM_SPARK_EXECUTOR_CORES)); // see if priority works - assertEquals( - (long) stdVarAccess.getVar(Integer.class, TestConstants.PARAM_SPARK_EXECUTOR_CORES), 4); - assertEquals((long) stdVarAccess.getVar(Integer.class, "conf.prop.integer"), 9); - assertEquals(stdVarAccess.getVar(String.class, "conf.prop.string"), "str"); - assertEquals( - stdVarAccess.getVar(String.class, "wds.linkis.client.param.conf.spark.executor.memory"), - "11111G"); - - System.out.println(stdVarAccess.getAllVarKeys().length); - System.out.println(Arrays.toString(stdVarAccess.getAllVarKeys())); - assertTrue(stdVarAccess.getAllVarKeys().length != 0); - } - - /** Method: getVarOrDefault(Class clazz, String key, T defaultValue) */ - @Test - public void testGetVarOrDefault() throws Exception { - // TODO: Test goes here... - } - - /** Method: getAllVarKeys() */ - @Test - public void testGetAllVarKeys() throws Exception { - // TODO: Test goes here... - } - - /** Method: getVarFromParam(Class clazz, String key, Params param) */ - @Test - public void testGetVarFromParam() throws Exception { - // TODO: Test goes here... 
- /* - try { - Method method = StdVarAccess.getClass().getMethod("getVarFromParam", Class.class, String.class, Params.class); - method.setAccessible(true); - method.invoke(, ); - } catch(NoSuchMethodException e) { - } catch(IllegalAccessException e) { - } catch(InvocationTargetException e) { - } - */ - } - - /** Method: getDefaultVarFromParam(Class clazz, String key, Params param) */ - @Test - public void testGetDefaultVarFromParam() throws Exception { - // TODO: Test goes here... - /* - try { - Method method = StdVarAccess.getClass().getMethod("getDefaultVarFromParam", Class.class, String.class, Params.class); - method.setAccessible(true); - method.invoke(, ); - } catch(NoSuchMethodException e) { - } catch(IllegalAccessException e) { - } catch(InvocationTargetException e) { - } - */ - } - - /** Method: getVarFromCfg(Class clazz, String key, ClientProperties conf) */ - @Test - public void testGetVarFromCfg() throws Exception { - // TODO: Test goes here... - /* - try { - Method method = StdVarAccess.getClass().getMethod("getVarFromCfg", Class.class, String.class, ClientProperties.class); - method.setAccessible(true); - method.invoke(, ); - } catch(NoSuchMethodException e) { - } catch(IllegalAccessException e) { - } catch(InvocationTargetException e) { - } - */ - } - - /** - * Method: getVarFromCfgGivenConverter(String key, ClientProperties conf, - * AbstractStringConverter converter) - */ - @Test - public void testGetVarFromCfgGivenConverter() throws Exception { - // TODO: Test goes here... 
- /* - try { - Method method = StdVarAccess.getClass().getMethod("getVarFromCfgGivenConverter", String.class, ClientProperties.class, AbstractStringConverter.class); - method.setAccessible(true); - method.invoke(, ); - } catch(NoSuchMethodException e) { - } catch(IllegalAccessException e) { - } catch(InvocationTargetException e) { - } - */ - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/utils/TestUtils.java b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/utils/TestUtils.java deleted file mode 100644 index 61cdd6aa7f..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/java/org/apache/linkis/cli/core/utils/TestUtils.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.cli.core.utils; - -import org.apache.commons.io.FileUtils; -import org.apache.commons.lang3.StringUtils; - -import java.io.File; -import java.util.HashMap; -import java.util.Map; - -public class TestUtils { - private static String replaceParas(String context, Map m) { - if (context == null) { - return null; - } - for (Map.Entry entry : m.entrySet()) { - if (entry.getKey() == null || entry.getValue() == null) { - continue; - } - String key = "[#" + entry.getKey().trim() + "]"; - String value = entry.getValue().trim(); - - context = StringUtils.replace(context, key, value); - } - return context; - } - - public static Map parseArgMap(String str, String separator) { - - Map argsProps = new HashMap<>(); - String[] args = StringUtils.splitByWholeSeparator(str, separator); - - for (String arg : args) { - int index = arg.indexOf("="); - if (index != -1) { - argsProps.put(arg.substring(0, index).trim(), arg.substring(index + 1).trim()); - } - } - return argsProps; - } - - public static String readShellFileAndReplaceParas(String filename, String argsStr, String split) - throws Exception { - - String fileContent; - - File inputFile = new File(filename); - - fileContent = FileUtils.readFileToString(inputFile); - - Map argsMap = parseArgMap(argsStr, split); - - fileContent = replaceParas(fileContent, argsMap); - - return fileContent; - } -} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/resources/conf/linkis-cli.properties b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/resources/conf/linkis-cli.properties deleted file mode 100644 index e0f85cf3e5..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/resources/conf/linkis-cli.properties +++ /dev/null @@ -1,34 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. 
See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -wds.linkis.client.common.creator=IDE3 -wds.linkis.client.common.gatewayUrl=http://127.0.0.1:8088 -#wds.linkis.client.common.authStrategy -#wds.linki.clients.common.tokenKey -#wds.linkis.client.common.tokenValue -# -#wds.linkis.client.common.submitUser -#wds.linkis.client.common.submitPassword -#wds.linkis.client.common.proxyUser -wds.linkis.client.param.conf.wds.linkis.yarnqueue=q02 -wds.linkis.client.param.conf.yarnqueue.cores.max=233 -wds.linkis.client.param.conf.yarnqueue.memory.max=233G -wds.linkis.client.param.conf.spark.executor.instances=9 -wds.linkis.client.param.conf.spark.executor.cores=9 -wds.linkis.client.param.conf.spark.executor.memory=9 -#wds.linkis.client.label="key1=val1,key2=val2" -#wds.linkis.client.param.conf="key1=val1,key2=val2" -#wds.linkis.client.param.var="key1=val1,key2=val2" -#wds.linkis.client.source="key1=val1,key2=val2" \ No newline at end of file diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/resources/log4j2.xml b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/resources/log4j2.xml deleted file mode 100644 index c744678876..0000000000 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/test/resources/log4j2.xml +++ /dev/null @@ -1,50 +0,0 @@ - - - - - - 
${sys:log.path} - ${sys:log.file} - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/linkis-computation-governance/linkis-client/linkis-cli/pom.xml b/linkis-computation-governance/linkis-client/linkis-cli/pom.xml index e9efc432d5..76723c4adb 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/pom.xml +++ b/linkis-computation-governance/linkis-client/linkis-cli/pom.xml @@ -25,15 +25,24 @@ ../../../pom.xml linkis-cli - pom - - - linkis-cli-common - linkis-cli-core - linkis-cli-application - + jar + + org.apache.linkis + linkis-computation-client + ${project.version} + + + org.apache.linkis + linkis-gateway-httpclient-support + ${project.version} + + + org.reflections + reflections + ${reflections.version} + org.apache.linkis linkis-common @@ -42,17 +51,53 @@ org.apache.commons commons-lang3 + ${commons-lang3.version} com.google.code.gson gson ${gson.version} - ${project.artifactId}-${project.version} + + + true + ${basedir}/src/main/resources + + + + + org.apache.maven.plugins + maven-assembly-plugin + false + + false + out + false + false + + /src/main/assembly/distribution.xml + + + + + make-assembly + + single + + package + + + /src/main/assembly/distribution.xml + + + + + + diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/assembly/distribution.xml b/linkis-computation-governance/linkis-client/linkis-cli/src/main/assembly/distribution.xml similarity index 98% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/assembly/distribution.xml rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/assembly/distribution.xml index a1cf04696a..24463e39af 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/assembly/distribution.xml +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/assembly/distribution.xml @@ -16,7 +16,7 @@ ~ 
limitations under the License. --> - linkis-cli diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/CtxBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/CtxBuilder.java new file mode 100644 index 0000000000..cfa57d4e26 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/CtxBuilder.java @@ -0,0 +1,185 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.command.Params; +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.var.VarAccess; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.PropsException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.command.CliCmdType; +import org.apache.linkis.cli.application.interactor.command.CmdTemplateFactory; +import org.apache.linkis.cli.application.interactor.command.fitter.SingleTplFitter; +import org.apache.linkis.cli.application.interactor.command.parser.Parser; +import org.apache.linkis.cli.application.interactor.command.parser.SingleCmdParser; +import org.apache.linkis.cli.application.interactor.command.parser.result.ParseResult; +import org.apache.linkis.cli.application.interactor.context.CliCtxImpl; +import org.apache.linkis.cli.application.interactor.properties.ClientProperties; +import org.apache.linkis.cli.application.interactor.properties.PropertiesLoader; +import org.apache.linkis.cli.application.interactor.properties.PropsFilesScanner; +import org.apache.linkis.cli.application.interactor.properties.reader.PropertiesReader; +import org.apache.linkis.cli.application.interactor.properties.reader.PropsFileReader; +import org.apache.linkis.cli.application.interactor.validate.ParsedTplValidator; +import org.apache.linkis.cli.application.interactor.var.VarAccessImpl; +import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.cli.application.utils.LoggerManager; + 
+import org.apache.commons.lang3.StringUtils; + +import java.io.*; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Properties; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class CtxBuilder { + private static Logger logger = LoggerFactory.getLogger(CtxBuilder.class); + + /** generate Templates load env variables TODO: load version info */ + public static CliCtx buildCtx(String[] args) throws LinkisClientRuntimeException { + /* + user input + */ + CmdTemplate template = CmdTemplateFactory.getTemplateCopy(CliCmdType.UNIVERSAL); + Parser parser = + new SingleCmdParser() + .setMapper(null) + .setTemplate(template) + .setFitter(new SingleTplFitter()); + + ParseResult result = parser.parse(args); + + ParsedTplValidator parsedTplValidator = new ParsedTplValidator(); + + parsedTplValidator.doValidation(result.getParsedTemplate()); + + Params params = result.getParams(); + logger.debug("==========params============\n" + CliUtils.GSON.toJson(params)); + + /* + VarAccess for sys_prop, sys_env + */ + + Map propertiesMap = new HashMap<>(); + + LoggerManager.getInformationLogger() + .info( + "LogFile path: " + + System.getProperty(CliKeys.LOG_PATH_KEY) + + "/" + + System.getProperty(CliKeys.LOG_FILE_KEY)); + /* + default config, -Dconf.root & -Dconf.file specifies config path + */ + // scan config files given root path + String configPath = System.getProperty(CliKeys.CLIENT_CONFIG_ROOT_KEY); + String defaultConfFileName = + System.getProperty(CliKeys.DEFAULT_CONFIG_FILE_NAME_KEY, CliConstants.DEFAULT_CONFIG_NAME); + + if (StringUtils.isBlank(configPath)) { + throw new PropsException( + "PRP0007", + ErrorLevel.ERROR, + CommonErrMsg.PropsLoaderErr, + "configuration root path specified by env variable: " + + CliKeys.CLIENT_CONFIG_ROOT_KEY + + " is empty."); + } + + List readersList = + new PropsFilesScanner().getPropsReaders(configPath); // +1 user config + /* + user defined config + */ + String userConfPath = 
null; + if (params.containsParam(CliKeys.LINKIS_CLIENT_USER_CONFIG)) { + userConfPath = + (String) params.getParamItemMap().get(CliKeys.LINKIS_CLIENT_USER_CONFIG).getValue(); + } + if (StringUtils.isNotBlank(userConfPath)) { + PropertiesReader reader = + new PropsFileReader() + .setPropsId(CliKeys.LINKIS_CLIENT_USER_CONFIG) + .setPropsPath(userConfPath); + readersList.add(reader); + } else { + LoggerManager.getInformationLogger() + .info("User does not provide usr-configuration file. Will use default config"); + } + /* + load properties + */ + PropertiesLoader loader = + new PropertiesLoader() + .addPropertiesReaders(readersList.toArray(new PropertiesReader[readersList.size()])); + ClientProperties[] loaderResult = loader.loadProperties(); + for (ClientProperties properties : loaderResult) { + if (StringUtils.equals(properties.getPropsId(), CliKeys.LINKIS_CLIENT_USER_CONFIG)) { + for (Map.Entry prop : properties.entrySet()) { + if (StringUtils.startsWith( + (String) prop.getKey(), CliKeys.LINKIS_CLIENT_NONCUSTOMIZABLE)) { + throw new PropsException( + "PRP0007", + ErrorLevel.ERROR, + CommonErrMsg.PropsLoaderErr, + "User cannot specify non-customizable configuration: " + prop.getKey()); + } + } + } + propertiesMap.put(properties.getPropsId(), properties); + } + + /* + VarAccess for cmd, config + */ + VarAccess varAccess = + new VarAccessImpl() + .setCmdParams(params) + .setUserConf(propertiesMap.get(CliKeys.LINKIS_CLIENT_USER_CONFIG)) + .setDefaultConf(propertiesMap.get(defaultConfFileName)) + .init(); + logger.info("==========std_var============\n" + CliUtils.GSON.toJson(varAccess)); + + Properties props = new Properties(); + try (InputStream inputStream = + CtxBuilder.class.getClassLoader().getResourceAsStream("version.properties")) { + try (InputStreamReader reader = new InputStreamReader(inputStream)) { + try (BufferedReader bufferedReader = new BufferedReader(reader)) { + props.load(bufferedReader); + } + } + } catch (Exception e) { + logger.warn("Failed to 
load version info", e); + } + + String verion = props.getProperty(CliKeys.VERSION); + + Map extraMap = new HashMap<>(); + extraMap.put(CliKeys.VERSION, verion); + + return new CliCtxImpl(params.getCmdType(), template, varAccess, extraMap); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/LinkisClientApplication.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/LinkisClientApplication.java new file mode 100644 index 0000000000..1fb21043a1 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/LinkisClientApplication.java @@ -0,0 +1,193 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.job.Job; +import org.apache.linkis.cli.application.entity.job.JobResult; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.interactor.command.CmdTemplateFactory; +import org.apache.linkis.cli.application.interactor.command.template.UniversalCmdTemplate; +import org.apache.linkis.cli.application.interactor.job.help.HelpJob; +import org.apache.linkis.cli.application.interactor.job.interactive.InteractiveJob; +import org.apache.linkis.cli.application.interactor.job.jobcmd.JobCmdJob; +import org.apache.linkis.cli.application.interactor.job.once.LinkisOnceJob; +import org.apache.linkis.cli.application.interactor.job.version.VersionJob; +import org.apache.linkis.cli.application.operator.OperManager; +import org.apache.linkis.cli.application.operator.once.OnceOperBuilder; +import org.apache.linkis.cli.application.operator.ujes.LinkisOperBuilder; +import org.apache.linkis.cli.application.present.HelpPresenter; +import org.apache.linkis.cli.application.present.model.HelpInfoModel; +import org.apache.linkis.cli.application.utils.LoggerManager; +import org.apache.linkis.cli.application.utils.SchedulerManager; + +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.exception.ExceptionUtils; + +import java.util.HashMap; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class LinkisClientApplication { + private static Logger logger = LoggerFactory.getLogger(LinkisClientApplication.class); + + private static boolean showHelp = false; + + public static void main(String[] args) { + /* + generate 
template + */ + CmdTemplateFactory.register(new UniversalCmdTemplate()); + + /* + build ctx + */ + CliCtx ctx = null; + try { + ctx = CtxBuilder.buildCtx(args); + } catch (CommandException e) { + CmdTemplate template = CmdTemplateFactory.getTemplateOri(e.getCmdType()); + if (template != null) { + HelpInfoModel model = new HelpInfoModel(); + model.buildModel(ctx.getTemplate()); + new HelpPresenter().present(model); + } + LoggerManager.getInformationLogger().error("Failed to build CliCtx", e); + System.exit(-1); + } + + /* + prepare oper + */ + OperManager.register(CliKeys.Linkis_OPER, new LinkisOperBuilder()); + OperManager.register(CliKeys.LINKIS_ONCE, new OnceOperBuilder()); + + /* + run job + */ + Job job; + if (isVersionCmd(ctx)) { + job = new VersionJob(); + } else if (isHelp(ctx)) { + job = new HelpJob(); + } else if (isJobCmd(ctx)) { + job = new JobCmdJob(); + } else if (isOnceCmd(ctx)) { + job = new LinkisOnceJob(); + } else { + job = new InteractiveJob(); + } + job.build(ctx); + JobResult result; + try { + Runtime.getRuntime() + .addShutdownHook( + new Thread( + () -> { + if (job != null) { + job.onDestroy(); + } + })); + result = job.run(); + } catch (Exception e) { + logger.error("Failed to execute job", e); + result = + new JobResult() { + @Override + public Boolean isSuccess() { + return false; + } + + @Override + public String getMessage() { + return "Failed to execute job" + ExceptionUtils.getStackTrace(e); + } + + @Override + public Map getExtraMessage() { + return new HashMap<>(); + } + }; + } + + /* + process result + */ + printIndicator(result); + + SchedulerManager.shutDown(); + + if (result.isSuccess()) { + System.exit(0); + } else { + System.exit(-1); + } + } + + private static void printIndicator(JobResult jobResult) { + if (jobResult.isSuccess()) { + LoggerManager.getPlaintTextLogger().info(CliConstants.SUCCESS_INDICATOR); + } else { + LoggerManager.getPlaintTextLogger().info(jobResult.getMessage()); + StringBuilder b = new StringBuilder(); + 
for (Map.Entry e : jobResult.getExtraMessage().entrySet()) { + b.append(e.getKey()).append(":").append(e.getValue()).append(System.lineSeparator()); + } + LoggerManager.getPlaintTextLogger().info(b.toString()); + LoggerManager.getPlaintTextLogger().info(CliConstants.FAILURE_INDICATOR); + } + } + + private static Boolean isHelp(CliCtx ctx) { + if (ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_HELP_OPT)) { + return true; + } + return false; + } + + private static Boolean isVersionCmd(CliCtx ctx) { + if (ctx.getVarAccess().hasVar(CliKeys.VERSION)) { + return true; + } + return false; + } + + private static Boolean isJobCmd(CliCtx ctx) { + if (ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_KILL_OPT) + || ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_STATUS_OPT) + || ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_DESC_OPT) + || ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_LOG_OPT) + || ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_RESULT_OPT) + || ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_LIST_OPT)) { + return true; + } + return false; + } + + private static Boolean isOnceCmd(CliCtx ctx) { + String mode = + ctx.getVarAccess() + .getVarOrDefault(String.class, CliKeys.LINKIS_CLIENT_MODE_OPT, CliConstants.UJES_MODE); + return StringUtils.equalsIgnoreCase(mode, CliConstants.ONCE_MODE); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/constants/AppConstants.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/constants/CliConstants.java similarity index 66% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/constants/AppConstants.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/constants/CliConstants.java index 711fc2d156..d277f435fe 100644 --- 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/constants/AppConstants.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/constants/CliConstants.java @@ -17,7 +17,7 @@ package org.apache.linkis.cli.application.constants; -public class AppConstants { +public class CliConstants { public static final String DEFAULT_CONFIG_NAME = "linkis-cli.properties"; public static final Long JOB_QUERY_SLEEP_MILLS = 2000l; @@ -41,4 +41,29 @@ public class AppConstants { public static final String UJES_MODE = "ujes"; public static final String ONCE_MODE = "once"; + + public static final Integer REQUEST_MAX_RETRY_TIME = 3; + + public static final String UNIVERSAL_SUBCMD = "linkis-cli"; + + public static final String UNIVERSAL_SUBCMD_DESC = + "command for all types of jobs supported by Linkis"; + + public static final String SUCCESS_INDICATOR = "############Execute Success!!!########"; + + public static final String FAILURE_INDICATOR = "############Execute Error!!!########"; + + public static final String ARRAY_SEQ = "@#@"; + + public static final String ARRAY_SEQ_REGEX = "(?=([^\"]*\"[^\"]*\")*[^\"]*$)"; + + public static final int MAX_NUM_OF_COMMAND_ARGUEMENTS = 10; + + public static final String CONFIG_DIR = "config.path"; + + public static final String[] CONFIG_EXTENSION = {"properties"}; + + public static final String SYSTEM_PROPERTIES_IDENTIFIER = "SYS_PROP"; + + public static final String SYSTEM_ENV_IDENTIFIER = "SYS_ENV"; } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/constants/AppKeys.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/constants/CliKeys.java similarity index 95% rename from 
linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/constants/AppKeys.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/constants/CliKeys.java index 7a38d9191f..966836e0bf 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/constants/AppKeys.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/constants/CliKeys.java @@ -17,7 +17,7 @@ package org.apache.linkis.cli.application.constants; -public class AppKeys { +public class CliKeys { /** User Not configurable */ public static final String ADMIN_USERS = "hadoop,root,shangda"; @@ -38,6 +38,8 @@ public class AppKeys { public static final String DEFAULT_CONFIG_FILE_NAME_KEY = "conf.file"; public static final String LINUX_USER_KEY = "user.name"; + public static final String VERSION = "cli.version"; + /** Configurable */ /* execution type @@ -75,6 +77,8 @@ public class AppKeys { JOB_LABEL + "." + LinkisKeys.KEY_CODETYPE; // corresponds to server api. public static final String JOB_LABEL_EXECUTEONCE = JOB_LABEL + "." + LinkisKeys.KEY_EXECUTEONCE; // corresponds to server api. + public static final String JOB_LABEL_CLUSTER = + JOB_LABEL + "." + LinkisKeys.KEY_CLUSTER; // corresponds to server api. 
/* Job command @@ -139,6 +143,6 @@ public class AppKeys { public static final String LINKIS_CLIENT_COMMON_RESULT_SET_PAGE_SIZE = LINKIS_CLIENT_COMMON + ".resultset.page.size"; - public static final String REUSABLE_UJES_CLIENT = "reusable.ujes.client"; - public static final String REUSABLE_ONCEJOB_CLIENT = "reusable.onceJob.client"; + public static final String Linkis_OPER = "linkis.oper"; + public static final String LINKIS_ONCE = "linkis.once"; } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/constants/LinkisConstants.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/constants/LinkisConstants.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/constants/LinkisConstants.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/constants/LinkisConstants.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/constants/LinkisKeys.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/constants/LinkisKeys.java similarity index 95% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/constants/LinkisKeys.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/constants/LinkisKeys.java index 9488c25f4a..170f1a8f86 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/constants/LinkisKeys.java +++ 
b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/constants/LinkisKeys.java @@ -34,6 +34,7 @@ public class LinkisKeys { public static final String KEY_EXECID = "execId"; public static final String KEY_UMUSER = "umUser"; public static final String KEY_EXECUTEONCE = "executeOnce"; + public static final String KEY_CLUSTER = "yarnCluster"; public static final String KEY_TENANT = "tenant"; public static final String META_DATA_COLUMN_NAME = "columnName"; public static final String KEY_SHELL_WORKING_DIRECTORY = @@ -46,4 +47,5 @@ public class LinkisKeys { public static final String KEY_YARN_QUEUE = "wds.linkis.rm.yarnqueue"; public static final String KEY_HIVE_RESULT_DISPLAY_TBALE = "hive.resultset.use.unique.column.names"; + public static final String CLI_VERSION = "cli.version"; } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/CmdOption.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/CmdOption.java similarity index 95% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/CmdOption.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/CmdOption.java index f9b6924be5..1583090c88 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/CmdOption.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/CmdOption.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.command; +package org.apache.linkis.cli.application.entity.command; public interface CmdOption extends Cloneable { String getParamName(); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/CmdTemplate.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/CmdTemplate.java similarity index 94% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/CmdTemplate.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/CmdTemplate.java index da802bc5ff..94677774a8 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/CmdTemplate.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/CmdTemplate.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.command; +package org.apache.linkis.cli.application.entity.command; import java.util.List; import java.util.Map; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/CmdType.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/CmdType.java similarity index 93% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/CmdType.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/CmdType.java index 6819fdb92b..2f9dd803e8 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/CmdType.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/CmdType.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.command; +package org.apache.linkis.cli.application.entity.command; public interface CmdType extends Cloneable { diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/ParamItem.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/ParamItem.java similarity index 96% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/ParamItem.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/ParamItem.java index 02a33cfc95..c8a28d9009 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/ParamItem.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/ParamItem.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.command; +package org.apache.linkis.cli.application.entity.command; public class ParamItem { private String keyPrefix; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/Params.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/Params.java similarity index 97% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/Params.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/Params.java index 9fa73c01fc..558521ad72 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/command/Params.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/command/Params.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.command; +package org.apache.linkis.cli.application.entity.command; import java.util.Map; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/execution/Execution.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/context/CliCtx.java similarity index 62% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/execution/Execution.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/context/CliCtx.java index 23e99601eb..f18a402c2e 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/execution/Execution.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/context/CliCtx.java @@ -15,21 +15,25 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.execution; +package org.apache.linkis.cli.application.entity.context; -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.result.ExecutionResult; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.command.CmdType; +import org.apache.linkis.cli.application.entity.var.VarAccess; import java.util.Map; -public interface Execution { - /* - start execution - */ - ExecutionResult execute(Map jobs); +public interface CliCtx { + /** + * accessing and passing VarAccess instance between jobs + * + * @return + */ + CmdType getCmdType(); - /* - terminate execution(often used in shutdown hook) - */ - boolean terminate(Map jobs); + CmdTemplate getTemplate(); + + VarAccess getVarAccess(); + + Map getExtraMap(); } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/DisplayOperator.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/job/Job.java similarity index 79% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/DisplayOperator.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/job/Job.java index 228b75856d..303c530c2a 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/DisplayOperator.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/job/Job.java @@ -15,10 +15,14 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.present.display; +package org.apache.linkis.cli.application.entity.job; -import org.apache.linkis.cli.core.present.display.data.DisplayData; +import org.apache.linkis.cli.application.entity.context.CliCtx; -public interface DisplayOperator { - void doOutput(DisplayData data); +public interface Job { + void build(CliCtx ctx); + + JobResult run(); + + void onDestroy(); } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/LogCallbackService.scala b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/job/JobResult.java similarity index 80% rename from linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/LogCallbackService.scala rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/job/JobResult.java index 873b098194..eba7b7ba8a 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/LogCallbackService.scala +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/job/JobResult.java @@ -15,12 +15,14 @@ * limitations under the License. 
*/ -package org.apache.linkis.ecm.server.service +package org.apache.linkis.cli.application.entity.job; -import org.apache.linkis.protocol.callback.LogCallbackProtocol +import java.util.Map; -trait LogCallbackService { +public interface JobResult { + Boolean isSuccess(); - def dealLog(protocol: LogCallbackProtocol): Unit + String getMessage(); + Map getExtraMessage(); } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/JobStatus.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/job/JobStatus.java similarity index 95% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/JobStatus.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/job/JobStatus.java index d0b6f445b6..7f048dd88a 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/JobStatus.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/job/JobStatus.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.job; +package org.apache.linkis.cli.application.entity.job; public interface JobStatus { diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/version/Version.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/operator/JobOper.java similarity index 90% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/version/Version.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/operator/JobOper.java index 7f005b0bbd..ae1bf7c465 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/version/Version.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/operator/JobOper.java @@ -15,6 +15,6 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.version; +package org.apache.linkis.cli.application.entity.operator; -public interface Version {} +public interface JobOper {} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/present/Model.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/present/Model.java similarity index 93% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/present/Model.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/present/Model.java index b68fd986aa..9fe2c106fc 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/present/Model.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/present/Model.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.present; +package org.apache.linkis.cli.application.entity.present; public interface Model { void buildModel(Object data); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/present/Presenter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/present/Presenter.java similarity index 88% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/present/Presenter.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/present/Presenter.java index dbe1adaefe..7c27316dfb 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/present/Presenter.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/present/Presenter.java @@ -15,8 +15,8 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.present; +package org.apache.linkis.cli.application.entity.present; public interface Presenter { - void present(Model model, PresentWay presentWay); + void present(Model model); } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/var/VarAccess.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/var/VarAccess.java similarity index 94% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/var/VarAccess.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/var/VarAccess.java index 50df2236e1..8236bd81c9 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/var/VarAccess.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/entity/var/VarAccess.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.var; +package org.apache.linkis.cli.application.entity.var; /** * @program: linkis-cli @@ -24,8 +24,6 @@ */ public interface VarAccess { - void checkInit(); - T getVar(Class clazz, String key); T getVarOrDefault(Class clazz, String key, T defaultValue); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/BuilderException.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/BuilderException.java similarity index 83% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/BuilderException.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/BuilderException.java index b4f35643aa..b00c15f7e0 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/BuilderException.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/BuilderException.java @@ -15,11 +15,10 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.exception; +package org.apache.linkis.cli.application.exception; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.common.exception.error.ErrorMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.exception.error.ErrorMsg; public class BuilderException extends LinkisClientRuntimeException { private static final long serialVersionUID = 5454234257L; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/CommandException.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/CommandException.java similarity index 85% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/CommandException.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/CommandException.java index 6f6c5512d4..e0cff74b1e 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/CommandException.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/CommandException.java @@ -15,12 +15,11 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.exception; +package org.apache.linkis.cli.application.exception; -import org.apache.linkis.cli.common.entity.command.CmdType; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.common.exception.error.ErrorMsg; +import org.apache.linkis.cli.application.entity.command.CmdType; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.exception.error.ErrorMsg; public class CommandException extends LinkisClientRuntimeException { private static final long serialVersionUID = 745261661L; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/LinkisClientException.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/LinkisClientException.java similarity index 94% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/LinkisClientException.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/LinkisClientException.java index 7ffd880b73..9dcb69a410 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/LinkisClientException.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/LinkisClientException.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.exception; +package org.apache.linkis.cli.application.exception; public abstract class LinkisClientException extends Exception { private static final long serialVersionUID = 42563456489L; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/LinkisClientExecutionException.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/LinkisClientExecutionException.java similarity index 84% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/LinkisClientExecutionException.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/LinkisClientExecutionException.java index e97d068d4e..3acfd71f99 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/LinkisClientExecutionException.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/LinkisClientExecutionException.java @@ -15,11 +15,10 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.exception; +package org.apache.linkis.cli.application.exception; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.common.exception.error.ErrorMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.exception.error.ErrorMsg; public class LinkisClientExecutionException extends LinkisClientRuntimeException { private static final long serialVersionUID = 987189405659L; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/LinkisClientRuntimeException.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/LinkisClientRuntimeException.java similarity index 94% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/LinkisClientRuntimeException.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/LinkisClientRuntimeException.java index 98538b05e4..0d5f4f7976 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/LinkisClientRuntimeException.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/LinkisClientRuntimeException.java @@ -15,10 +15,10 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.exception; +package org.apache.linkis.cli.application.exception; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.common.exception.error.ErrorMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.exception.error.ErrorMsg; import java.text.MessageFormat; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/PresenterException.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/PresenterException.java similarity index 83% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/PresenterException.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/PresenterException.java index dbfd57a2cc..42aa1b1e17 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/PresenterException.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/PresenterException.java @@ -15,11 +15,10 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.exception; +package org.apache.linkis.cli.application.exception; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.common.exception.error.ErrorMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.exception.error.ErrorMsg; public class PresenterException extends LinkisClientRuntimeException { private static final long serialVersionUID = 212314213L; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/PropsException.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/PropsException.java similarity index 83% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/PropsException.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/PropsException.java index e7e3799dcc..d39a9de3c0 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/PropsException.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/PropsException.java @@ -15,11 +15,10 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.exception; +package org.apache.linkis.cli.application.exception; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.common.exception.error.ErrorMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.exception.error.ErrorMsg; public class PropsException extends LinkisClientRuntimeException { private static final long serialVersionUID = 182747823415933L; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/TransformerException.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/TransformerException.java similarity index 83% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/TransformerException.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/TransformerException.java index efb578b971..d5b35c6c8d 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/TransformerException.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/TransformerException.java @@ -15,11 +15,10 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.exception; +package org.apache.linkis.cli.application.exception; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.common.exception.error.ErrorMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.exception.error.ErrorMsg; public class TransformerException extends LinkisClientRuntimeException { private static final long serialVersionUID = 5454234257L; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/UnknownException.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/UnknownException.java similarity index 83% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/UnknownException.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/UnknownException.java index ff5cfac139..a3fecf3537 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/UnknownException.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/UnknownException.java @@ -15,11 +15,10 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.exception; +package org.apache.linkis.cli.application.exception; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.common.exception.error.ErrorMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.exception.error.ErrorMsg; public class UnknownException extends LinkisClientRuntimeException { private static final long serialVersionUID = 974159L; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/ValidateException.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/ValidateException.java similarity index 83% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/ValidateException.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/ValidateException.java index 95896d9aa0..f796328dd5 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/ValidateException.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/ValidateException.java @@ -15,11 +15,10 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.exception; +package org.apache.linkis.cli.application.exception; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.common.exception.error.ErrorMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.exception.error.ErrorMsg; public class ValidateException extends LinkisClientRuntimeException { private static final long serialVersionUID = 5454234257L; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/VarAccessException.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/VarAccessException.java similarity index 83% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/VarAccessException.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/VarAccessException.java index 34509947d4..31dc848e36 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/VarAccessException.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/VarAccessException.java @@ -15,11 +15,10 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.exception; +package org.apache.linkis.cli.application.exception; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.common.exception.error.ErrorMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.exception.error.ErrorMsg; public class VarAccessException extends LinkisClientRuntimeException { private static final long serialVersionUID = 125344127L; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/error/CommonErrMsg.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/error/CommonErrMsg.java similarity index 96% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/error/CommonErrMsg.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/error/CommonErrMsg.java index 67eb0701c7..ea6247e0c4 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/exception/error/CommonErrMsg.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/error/CommonErrMsg.java @@ -15,9 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.exception.error; - -import org.apache.linkis.cli.common.exception.error.ErrorMsg; +package org.apache.linkis.cli.application.exception.error; public enum CommonErrMsg implements ErrorMsg { diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/error/ErrorLevel.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/error/ErrorLevel.java similarity index 96% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/error/ErrorLevel.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/error/ErrorLevel.java index ed2867c72d..170cedf9a5 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/error/ErrorLevel.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/error/ErrorLevel.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.exception.error; +package org.apache.linkis.cli.application.exception.error; public enum ErrorLevel { /** warn 1 error 2 fatal 3 */ diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/error/ErrorMsg.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/error/ErrorMsg.java similarity index 93% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/error/ErrorMsg.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/error/ErrorMsg.java index 3d092fe67d..4b6c5d3244 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/exception/error/ErrorMsg.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/exception/error/ErrorMsg.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.exception.error; +package org.apache.linkis.cli.application.exception.error; public interface ErrorMsg { diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/command/LinkisCmdType.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/CliCmdType.java similarity index 78% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/command/LinkisCmdType.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/CliCmdType.java index f03c3e779b..f2a352c542 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/command/LinkisCmdType.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/CliCmdType.java @@ -17,23 +17,23 @@ package org.apache.linkis.cli.application.interactor.command; -import org.apache.linkis.cli.common.entity.command.CmdType; -import org.apache.linkis.cli.core.constants.CommonConstants; +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.entity.command.CmdType; -public enum LinkisCmdType implements CmdType { - UNIVERSAL(CommonConstants.UNIVERSAL_SUBCMD, 1, CommonConstants.UNIVERSAL_SUBCMD_DESC); +public enum CliCmdType implements CmdType { + UNIVERSAL(CliConstants.UNIVERSAL_SUBCMD, 1, CliConstants.UNIVERSAL_SUBCMD_DESC); private int id; private String name; private String desc; - LinkisCmdType(String name, int id) { + CliCmdType(String name, int id) { this.id = id; this.name = name; this.desc = null; } - LinkisCmdType(String name, int id, String desc) { + 
CliCmdType(String name, int id, String desc) { this.id = id; this.name = name; this.desc = desc; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/CmdTemplateFactory.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/CmdTemplateFactory.java similarity index 83% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/CmdTemplateFactory.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/CmdTemplateFactory.java index 91a9f796cb..f3b901c1f8 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/CmdTemplateFactory.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/CmdTemplateFactory.java @@ -15,13 +15,13 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command; +package org.apache.linkis.cli.application.interactor.command; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.command.CmdType; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.command.CmdType; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/SpecialMap.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/SpecialMap.java similarity index 94% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/SpecialMap.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/SpecialMap.java index 86075e3328..427c35baef 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/SpecialMap.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/SpecialMap.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command; +package org.apache.linkis.cli.application.interactor.command; import java.util.HashMap; import java.util.Map; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/AbstractFitter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/AbstractFitter.java similarity index 89% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/AbstractFitter.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/AbstractFitter.java index d32d05bbb6..f5e1a99b58 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/AbstractFitter.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/AbstractFitter.java @@ -15,18 +15,18 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.fitter; - -import org.apache.linkis.cli.common.entity.command.CmdOption; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.command.CmdType; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.constants.CommonConstants; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.command.template.option.Flag; -import org.apache.linkis.cli.core.interactor.command.template.option.Parameter; +package org.apache.linkis.cli.application.interactor.command.fitter; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.entity.command.CmdOption; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.command.CmdType; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.command.template.option.Flag; +import org.apache.linkis.cli.application.interactor.command.template.option.Parameter; import org.apache.commons.lang3.StringUtils; @@ -225,7 +225,7 @@ private final int setParameterValue( Parameter param = (Parameter) cmdOption; if (param.accepctArrayValue()) { String[] args2 = Arrays.copyOfRange(args, argIdx, args.length); - param.setValueWithStr(StringUtils.join(args2, CommonConstants.ARRAY_SEQ)); + param.setValueWithStr(StringUtils.join(args2, CliConstants.ARRAY_SEQ)); return args.length; } else { 
parameters.get(paraIdx).setValueWithStr(args[argIdx]); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/Fitter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/Fitter.java similarity index 80% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/Fitter.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/Fitter.java index 305ffcaef1..cfd0f0fcfa 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/Fitter.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/Fitter.java @@ -15,10 +15,10 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.fitter; +package org.apache.linkis.cli.application.interactor.command.fitter; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; public interface Fitter { FitterResult fit(String[] input, CmdTemplate templateCopy) throws LinkisClientRuntimeException; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/FitterResult.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/FitterResult.java similarity index 90% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/FitterResult.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/FitterResult.java index 5e9f9820b1..abcee0d9af 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/FitterResult.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/FitterResult.java @@ -15,9 +15,9 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.fitter; +package org.apache.linkis.cli.application.interactor.command.fitter; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; public class FitterResult { String[] remains; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/FitterUtils.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/FitterUtils.java similarity index 93% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/FitterUtils.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/FitterUtils.java index 40d80d84b2..1b6b46cd10 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/FitterUtils.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/FitterUtils.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.fitter; +package org.apache.linkis.cli.application.interactor.command.fitter; public class FitterUtils { diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/SingleTplFitter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/SingleTplFitter.java similarity index 79% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/SingleTplFitter.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/SingleTplFitter.java index 8eed2ea89f..06b7d7e4f8 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/fitter/SingleTplFitter.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/fitter/SingleTplFitter.java @@ -15,13 +15,13 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.fitter; +package org.apache.linkis.cli.application.interactor.command.fitter; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; import java.util.ArrayList; import java.util.List; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/AbstarctParser.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/AbstarctParser.java similarity index 85% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/AbstarctParser.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/AbstarctParser.java index 30c0e1f014..16107b1869 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/AbstarctParser.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/AbstarctParser.java @@ -15,20 +15,20 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.parser; - -import org.apache.linkis.cli.common.entity.command.CmdOption; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.command.ParamItem; -import org.apache.linkis.cli.common.entity.command.Params; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.TransformerException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.command.SpecialMap; -import org.apache.linkis.cli.core.interactor.command.fitter.Fitter; -import org.apache.linkis.cli.core.interactor.command.parser.result.ParseResult; -import org.apache.linkis.cli.core.interactor.command.parser.transformer.ParamKeyMapper; +package org.apache.linkis.cli.application.interactor.command.parser; + +import org.apache.linkis.cli.application.entity.command.CmdOption; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.command.ParamItem; +import org.apache.linkis.cli.application.entity.command.Params; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.exception.TransformerException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.command.SpecialMap; +import org.apache.linkis.cli.application.interactor.command.fitter.Fitter; +import org.apache.linkis.cli.application.interactor.command.parser.result.ParseResult; +import org.apache.linkis.cli.application.interactor.command.parser.transformer.ParamKeyMapper; import org.apache.commons.lang3.StringUtils; diff --git 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/Parser.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/Parser.java similarity index 83% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/Parser.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/Parser.java index 0f3c847740..0e10977a63 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/Parser.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/Parser.java @@ -15,10 +15,10 @@ * limitations under the License. */ -package org.apache.linkis.cli.core.interactor.command.parser; +package org.apache.linkis.cli.application.interactor.command.parser; -import org.apache.linkis.cli.common.entity.command.Params; -import org.apache.linkis.cli.core.interactor.command.parser.result.ParseResult; +import org.apache.linkis.cli.application.entity.command.Params; +import org.apache.linkis.cli.application.interactor.command.parser.result.ParseResult; /** * 1. parse cmd arguments and fill into template 2. generate unique identifier for sub command 3. 
diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/SingleCmdParser.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/SingleCmdParser.java similarity index 77% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/SingleCmdParser.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/SingleCmdParser.java index aea3086fd5..4afb7a759f 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/SingleCmdParser.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/SingleCmdParser.java @@ -15,15 +15,15 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.parser; +package org.apache.linkis.cli.application.interactor.command.parser; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.command.Params; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.command.fitter.FitterResult; -import org.apache.linkis.cli.core.interactor.command.parser.result.ParseResult; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.command.Params; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.command.fitter.FitterResult; +import org.apache.linkis.cli.application.interactor.command.parser.result.ParseResult; import java.util.Arrays; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/result/ParseResult.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/result/ParseResult.java similarity index 88% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/result/ParseResult.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/result/ParseResult.java index 2a528f3b86..75059c86c5 100644 --- 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/result/ParseResult.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/result/ParseResult.java @@ -15,10 +15,10 @@ * limitations under the License. */ -package org.apache.linkis.cli.core.interactor.command.parser.result; +package org.apache.linkis.cli.application.interactor.command.parser.result; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.command.Params; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.command.Params; public class ParseResult { CmdTemplate parsedTemplate; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/transformer/ParamKeyMapper.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/transformer/ParamKeyMapper.java similarity index 88% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/transformer/ParamKeyMapper.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/transformer/ParamKeyMapper.java index 3799906bd5..f162a1c84c 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/parser/transformer/ParamKeyMapper.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/parser/transformer/ParamKeyMapper.java @@ -15,13 +15,13 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.parser.transformer; +package org.apache.linkis.cli.application.interactor.command.parser.transformer; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.utils.CommonUtils; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.utils.CliUtils; import org.apache.commons.lang3.StringUtils; @@ -79,7 +79,7 @@ public void updateMapping(String key, String targetKey) { /** update keyMapping according to kv-String. 
*/ private void updateMappingbyConfig(String kvString) { if (StringUtils.isNotBlank(kvString)) { - Map result = CommonUtils.parseKVStringToMap(kvString, ","); + Map result = CliUtils.parseKVStringToMap(kvString, ","); this.mapperRules.putAll(result); } } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/AbstractCmdTemplate.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/AbstractCmdTemplate.java similarity index 91% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/AbstractCmdTemplate.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/AbstractCmdTemplate.java index 992b58effc..ec1f318c2b 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/AbstractCmdTemplate.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/AbstractCmdTemplate.java @@ -15,20 +15,20 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.template; - -import org.apache.linkis.cli.common.entity.command.CmdOption; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.command.CmdType; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.constants.CommonConstants; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.command.SpecialMap; -import org.apache.linkis.cli.core.interactor.command.template.option.*; -import org.apache.linkis.cli.core.utils.converter.AbstractStringConverter; -import org.apache.linkis.cli.core.utils.converter.PredefinedStringConverters; +package org.apache.linkis.cli.application.interactor.command.template; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.entity.command.CmdOption; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.command.CmdType; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.command.SpecialMap; +import org.apache.linkis.cli.application.interactor.command.template.converter.AbstractStringConverter; +import org.apache.linkis.cli.application.interactor.command.template.converter.PredefinedStringConverters; +import org.apache.linkis.cli.application.interactor.command.template.option.*; import java.lang.reflect.Field; import java.util.*; @@ -291,10 +291,10 @@ protected final SpecialMapOption 
speciaMapOption( private void checkIllegalOption(final String[] names) { if (names == null || names.length <= 0) { throw new IllegalArgumentException("At least one cmdType should be given to CmdOption."); - } else if (names.length > CommonConstants.MAX_NUM_OF_COMMAND_ARGUEMENTS) { + } else if (names.length > CliConstants.MAX_NUM_OF_COMMAND_ARGUEMENTS) { throw new IllegalArgumentException( "At most " - + CommonConstants.MAX_NUM_OF_COMMAND_ARGUEMENTS + + CliConstants.MAX_NUM_OF_COMMAND_ARGUEMENTS + " cmdType can be given to CmdOption."); } else { for (String name : names) { diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/command/template/UniversalCmdTemplate.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/UniversalCmdTemplate.java similarity index 74% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/command/template/UniversalCmdTemplate.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/UniversalCmdTemplate.java index e238038aea..f66f5ab386 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/command/template/UniversalCmdTemplate.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/UniversalCmdTemplate.java @@ -17,15 +17,14 @@ package org.apache.linkis.cli.application.interactor.command.template; -import org.apache.linkis.cli.application.constants.AppConstants; -import org.apache.linkis.cli.application.constants.AppKeys; -import org.apache.linkis.cli.application.interactor.command.LinkisCmdType; -import 
org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.ValidateException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.command.template.AbstractCmdTemplate; -import org.apache.linkis.cli.core.interactor.command.template.option.*; +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.exception.ValidateException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.command.CliCmdType; +import org.apache.linkis.cli.application.interactor.command.template.option.*; import org.apache.commons.lang3.StringUtils; @@ -36,109 +35,118 @@ public class UniversalCmdTemplate extends AbstractCmdTemplate implements Cloneab protected StdOption gatewayUrl = option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_COMMON_GATEWAY_URL, + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_COMMON_GATEWAY_URL, new String[] {"--gatewayUrl"}, "specify linkis gateway url", true, ""); protected StdOption authenticatationStrategy = option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_COMMON_AUTHENTICATION_STRATEGY, + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_COMMON_AUTHENTICATION_STRATEGY, new String[] {"--authStg"}, "specify linkis authentication strategy", true, ""); protected StdOption authKey = option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_COMMON_TOKEN_KEY, + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_COMMON_TOKEN_KEY, new String[] {"--authKey"}, "specify linkis authentication key(tokenKey)", true, ""); protected StdOption authValue = option( - AppKeys.LINKIS_CLIENT_COMMON, - 
AppKeys.LINKIS_COMMON_TOKEN_VALUE, + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_COMMON_TOKEN_VALUE, new String[] {"--authVal"}, "specify linkis authentication value(tokenValue)", true, ""); protected StdOption userConfigPath = option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_CLIENT_USER_CONFIG, + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_CLIENT_USER_CONFIG, new String[] {"--userConf"}, "specify user configuration file path(absolute)", true, ""); protected StdOption killOpt = option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_CLIENT_KILL_OPT, + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_CLIENT_KILL_OPT, new String[] {"--kill"}, "specify linkis taskId for job to be killed", true, ""); protected StdOption logOpt = option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_CLIENT_LOG_OPT, + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_CLIENT_LOG_OPT, new String[] {"--log"}, "specify linkis taskId for querying job status", true, ""); protected StdOption resultOpt = option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_CLIENT_RESULT_OPT, + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_CLIENT_RESULT_OPT, new String[] {"--result"}, "specify linkis taskId for querying job status", true, ""); protected StdOption statusOpt = option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_CLIENT_STATUS_OPT, + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_CLIENT_STATUS_OPT, new String[] {"--status"}, "specify linkis taskId for querying job status", true, ""); protected StdOption asyncOpt = option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_CLIENT_ASYNC_OPT, + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_CLIENT_ASYNC_OPT, new String[] {"--async"}, "specify linkis taskId for querying job status", true, false); protected StdOption modeOpt = option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_CLIENT_MODE_OPT, + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_CLIENT_MODE_OPT, new String[] {"--mode"}, "specify linkis execution mode: " - + 
AppConstants.UJES_MODE + + CliConstants.UJES_MODE + "/" - + AppConstants.ONCE_MODE + + CliConstants.ONCE_MODE + ".", true, - AppConstants.UJES_MODE); + CliConstants.UJES_MODE); protected Flag helpOpt = flag( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_CLIENT_HELP_OPT, + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_CLIENT_HELP_OPT, new String[] {"--help"}, "specify linkis taskId for querying job status", true, false); + protected StdOption clusterOP = + option( + CliKeys.JOB_LABEL, + CliKeys.JOB_LABEL_CLUSTER, + new String[] {"-yarnCluster"}, + "specify linkis yarn cluster for this job", + true, + ""); + protected StdOption engineTypeOP = option( - AppKeys.JOB_LABEL, - AppKeys.JOB_LABEL_ENGINE_TYPE, + CliKeys.JOB_LABEL, + CliKeys.JOB_LABEL_ENGINE_TYPE, new String[] {"-engineType"}, "specify linkis engineType for this job", true, @@ -146,16 +154,16 @@ public class UniversalCmdTemplate extends AbstractCmdTemplate implements Cloneab protected StdOption codeTypeOp = option( - AppKeys.JOB_LABEL, - AppKeys.JOB_LABEL_CODE_TYPE, + CliKeys.JOB_LABEL, + CliKeys.JOB_LABEL_CODE_TYPE, new String[] {"-codeType"}, "specify linkis runType for this job", true, ""); protected StdOption codePathOp = option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.JOB_COMMON_CODE_PATH, + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.JOB_COMMON_CODE_PATH, new String[] {"-codePath"}, "specify file path that contains code you want to execute", true, @@ -163,8 +171,8 @@ public class UniversalCmdTemplate extends AbstractCmdTemplate implements Cloneab protected StdOption codeOp = option( - AppKeys.JOB_EXEC, - AppKeys.JOB_EXEC_CODE, + CliKeys.JOB_EXEC, + CliKeys.JOB_EXEC_CODE, new String[] {"-code"}, "specify code that you want to execute", true, @@ -172,8 +180,8 @@ public class UniversalCmdTemplate extends AbstractCmdTemplate implements Cloneab protected StdOption scriptPathOp = option( - AppKeys.JOB_SOURCE, - AppKeys.JOB_SOURCE_SCRIPT_PATH, + CliKeys.JOB_SOURCE, + CliKeys.JOB_SOURCE_SCRIPT_PATH, new 
String[] {"-scriptPath"}, "specify remote path for your uploaded script", true, @@ -181,8 +189,8 @@ public class UniversalCmdTemplate extends AbstractCmdTemplate implements Cloneab protected StdOption submitUser = option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.JOB_COMMON_SUBMIT_USER, + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.JOB_COMMON_SUBMIT_USER, new String[] {"-submitUser"}, "specify submit user for this job", true, @@ -190,8 +198,8 @@ public class UniversalCmdTemplate extends AbstractCmdTemplate implements Cloneab protected StdOption proxyUser = option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.JOB_COMMON_PROXY_USER, + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.JOB_COMMON_PROXY_USER, new String[] {"-proxyUser"}, "specify proxy user who executes your code in Linkis server-side", true, @@ -199,8 +207,8 @@ public class UniversalCmdTemplate extends AbstractCmdTemplate implements Cloneab protected StdOption creatorOp = option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.JOB_COMMON_CREATOR, + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.JOB_COMMON_CREATOR, new String[] {"-creator"}, "specify creator for this job", true, @@ -208,8 +216,8 @@ public class UniversalCmdTemplate extends AbstractCmdTemplate implements Cloneab protected StdOption outPathOp = option( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.LINKIS_CLIENT_COMMON_OUTPUT_PATH, + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.LINKIS_CLIENT_COMMON_OUTPUT_PATH, new String[] {"-outPath"}, "specify output path for resultSet. If not specified, then output reset to screen(stdout)", true, @@ -217,67 +225,79 @@ public class UniversalCmdTemplate extends AbstractCmdTemplate implements Cloneab protected MapOption confMapOp = mapOption( - AppKeys.JOB_PARAM_CONF, - AppKeys.JOB_PARAM_CONF, + CliKeys.JOB_PARAM_CONF, + CliKeys.JOB_PARAM_CONF, new String[] {"-confMap"}, "specify configurationMap(startupMap) for your job. You can put any start-up parameters into this Map(e.g. spark.executor.instances). 
Input format: -confMap key1=value1 -confMap key2=value2", true); protected MapOption runtimeMapOp = mapOption( - AppKeys.JOB_PARAM_RUNTIME, - AppKeys.JOB_PARAM_RUNTIME, + CliKeys.JOB_PARAM_RUNTIME, + CliKeys.JOB_PARAM_RUNTIME, new String[] {"-runtimeMap"}, "specify runtimeMap for your job. You can put any start-up parameters into this Map(e.g. spark.executor.instances). Input format: -runtimeMap key1=value1 -runtimeMap key2=value2", true); protected SpecialMapOption varMapOp = speciaMapOption( - AppKeys.JOB_PARAM_VAR, - AppKeys.JOB_PARAM_VAR, + CliKeys.JOB_PARAM_VAR, + CliKeys.JOB_PARAM_VAR, new String[] {"-varMap"}, "specify variables map. Variables is for key-word substitution. Use \'${key}\' to specify key-word. Input substitution rule as follow: -varMap key1=value1 -varMap key2=value2", true); protected MapOption labelMapOp = mapOption( - AppKeys.JOB_LABEL, - AppKeys.JOB_LABEL, + CliKeys.JOB_LABEL, + CliKeys.JOB_LABEL, new String[] {"-labelMap"}, "specify label map. You can put any Linkis into this Map. Input format: -labelMap labelName1=labelValue1 -labelMap labelName2=labelValue2", true); protected MapOption sourceMapOp = mapOption( - AppKeys.JOB_SOURCE, - AppKeys.JOB_SOURCE, + CliKeys.JOB_SOURCE, + CliKeys.JOB_SOURCE, new String[] {"-sourceMap"}, "specify source map. Input format: -sourceMap key1=value1 -sourceMap key2=value2", true); protected MapOption jobContentMapOp = mapOption( - AppKeys.JOB_CONTENT, - AppKeys.JOB_CONTENT, + CliKeys.JOB_CONTENT, + CliKeys.JOB_CONTENT, new String[] {"-jobContentMap"}, "specify jobContent map. 
Input format: -jobContentMap key1=value1 -jobContentMap key2=value2", true); + protected Flag versionFlag = + flag( + CliKeys.VERSION, + CliKeys.VERSION, + new String[] {"--version"}, + "show version", + true, + false); + protected Parameter argumentsParas = parameter( - AppKeys.LINKIS_CLIENT_COMMON, - AppKeys.JOB_EXTRA_ARGUMENTS, + CliKeys.LINKIS_CLIENT_COMMON, + CliKeys.JOB_EXTRA_ARGUMENTS, "arguments", "specify arguments if exist any", true, new String[] {""}); public UniversalCmdTemplate() { - super(LinkisCmdType.UNIVERSAL); + super(CliCmdType.UNIVERSAL); } @Override public void checkParams() throws CommandException { + if (versionFlag.hasVal()) { + return; + } int cnt = 0; if (statusOpt.hasVal()) { cnt++; @@ -349,7 +369,7 @@ public void checkParams() throws CommandException { cnt2++; } if (!modeOpt.hasVal() - || StringUtils.equalsIgnoreCase(modeOpt.getValue(), AppConstants.UJES_MODE)) { + || StringUtils.equalsIgnoreCase(modeOpt.getValue(), CliConstants.UJES_MODE)) { if (cnt2 > 1) { throw new ValidateException( "VLD0001", diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/converter/AbstractStringConverter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/converter/AbstractStringConverter.java similarity index 91% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/converter/AbstractStringConverter.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/converter/AbstractStringConverter.java index 3d5afaa172..68f3fae396 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/converter/AbstractStringConverter.java +++ 
b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/converter/AbstractStringConverter.java @@ -15,6 +15,6 @@ * limitations under the License. */ -package org.apache.linkis.cli.core.utils.converter; +package org.apache.linkis.cli.application.interactor.command.template.converter; public abstract class AbstractStringConverter implements Converter {} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/converter/Converter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/converter/Converter.java similarity index 91% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/converter/Converter.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/converter/Converter.java index 327f44cf2e..757d73d178 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/converter/Converter.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/converter/Converter.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.utils.converter; +package org.apache.linkis.cli.application.interactor.command.template.converter; public interface Converter { TO convert(FROM from); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/converter/PredefinedStringConverters.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/converter/PredefinedStringConverters.java similarity index 92% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/converter/PredefinedStringConverters.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/converter/PredefinedStringConverters.java index 5883f68e1b..b6e65c1e33 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/converter/PredefinedStringConverters.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/converter/PredefinedStringConverters.java @@ -15,13 +15,13 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.utils.converter; +package org.apache.linkis.cli.application.interactor.command.template.converter; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.constants.CommonConstants; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.command.SpecialMap; +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.command.SpecialMap; import org.apache.commons.lang3.StringUtils; @@ -50,7 +50,7 @@ public String[] convert(String from) { if (StringUtils.isBlank(from)) { return null; } - String[] ret = from.trim().split(CommonConstants.ARRAY_SEQ); + String[] ret = from.trim().split(CliConstants.ARRAY_SEQ); for (int i = 0; i < ret.length; i++) { ret[i] = StringUtils.strip(ret[i], " \""); } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/BaseOption.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/BaseOption.java similarity index 93% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/BaseOption.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/BaseOption.java index 8e9f288567..e497401bef 100644 --- 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/BaseOption.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/BaseOption.java @@ -15,10 +15,10 @@ * limitations under the License. */ -package org.apache.linkis.cli.core.interactor.command.template.option; +package org.apache.linkis.cli.application.interactor.command.template.option; -import org.apache.linkis.cli.common.entity.command.CmdOption; -import org.apache.linkis.cli.core.utils.converter.AbstractStringConverter; +import org.apache.linkis.cli.application.entity.command.CmdOption; +import org.apache.linkis.cli.application.interactor.command.template.converter.AbstractStringConverter; import org.apache.commons.lang3.StringUtils; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/Flag.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/Flag.java similarity index 91% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/Flag.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/Flag.java index 8b153fb7ac..ee66a64463 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/Flag.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/Flag.java @@ -15,9 +15,9 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.template.option; +package org.apache.linkis.cli.application.interactor.command.template.option; -import org.apache.linkis.cli.core.utils.converter.PredefinedStringConverters; +import org.apache.linkis.cli.application.interactor.command.template.converter.PredefinedStringConverters; import org.apache.commons.lang3.StringUtils; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/MapOption.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/MapOption.java similarity index 93% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/MapOption.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/MapOption.java index 095004c4d1..d330d11c9b 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/MapOption.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/MapOption.java @@ -15,11 +15,11 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.template.option; +package org.apache.linkis.cli.application.interactor.command.template.option; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; import org.apache.commons.lang3.StringUtils; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/Parameter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/Parameter.java similarity index 93% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/Parameter.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/Parameter.java index 09422106c8..802f451ebb 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/Parameter.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/Parameter.java @@ -15,9 +15,9 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.template.option; +package org.apache.linkis.cli.application.interactor.command.template.option; -import org.apache.linkis.cli.core.utils.converter.AbstractStringConverter; +import org.apache.linkis.cli.application.interactor.command.template.converter.AbstractStringConverter; import org.apache.commons.lang3.StringUtils; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/SpecialMapOption.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/SpecialMapOption.java similarity index 90% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/SpecialMapOption.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/SpecialMapOption.java index d14758206b..3fc6385db0 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/SpecialMapOption.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/SpecialMapOption.java @@ -15,9 +15,9 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.template.option; +package org.apache.linkis.cli.application.interactor.command.template.option; -import org.apache.linkis.cli.core.interactor.command.SpecialMap; +import org.apache.linkis.cli.application.interactor.command.SpecialMap; /** kv-pairs in SpecialMapOption will be excluded by varAccess */ public class SpecialMapOption extends MapOption implements Cloneable { diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/StdOption.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/StdOption.java similarity index 92% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/StdOption.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/StdOption.java index a9d29aef26..737cd73423 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/command/template/option/StdOption.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/command/template/option/StdOption.java @@ -15,9 +15,9 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.command.template.option; +package org.apache.linkis.cli.application.interactor.command.template.option; -import org.apache.linkis.cli.core.utils.converter.AbstractStringConverter; +import org.apache.linkis.cli.application.interactor.command.template.converter.AbstractStringConverter; import org.apache.commons.lang3.StringUtils; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/context/CliCtxImpl.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/context/CliCtxImpl.java new file mode 100644 index 0000000000..7d4f5d94b8 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/context/CliCtxImpl.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.interactor.context; + +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.command.CmdType; +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.var.VarAccess; + +import java.util.Map; + +public class CliCtxImpl implements CliCtx { + private CmdType cmdType; + private CmdTemplate cmdTemplate; + private VarAccess varAccess; + private Map extraMap; + + public CliCtxImpl( + CmdType cmdType, CmdTemplate cmdTemplate, VarAccess varAccess, Map extraMap) { + this.cmdType = cmdType; + this.cmdTemplate = cmdTemplate; + this.varAccess = varAccess; + this.extraMap = extraMap; + } + + @Override + public CmdType getCmdType() { + return cmdType; + } + + @Override + public CmdTemplate getTemplate() { + return cmdTemplate; + } + + @Override + public VarAccess getVarAccess() { + return varAccess; + } + + @Override + public Map getExtraMap() { + return extraMap; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/JobKiller.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/JobKiller.java new file mode 100644 index 0000000000..6693abe85f --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/JobKiller.java @@ -0,0 +1,121 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.common; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.operator.ujes.LinkisJobOper; +import org.apache.linkis.cli.application.operator.ujes.LinkisOperResultAdapter; +import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.cli.application.utils.LoggerManager; + +import org.apache.commons.lang3.exception.ExceptionUtils; + +import java.text.MessageFormat; +import java.util.HashMap; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class JobKiller { + private static final Logger logger = LoggerFactory.getLogger(JobKiller.class); + + private LinkisJobOper oper; + + public JobKiller(LinkisJobOper linkisJobOperator) { + this.oper = linkisJobOperator; + } + + public KillResult doKill(String username, String jobId) { + + LinkisOperResultAdapter jobInfoResult; + try { + jobInfoResult = oper.queryJobInfo(username, jobId); + } catch (Exception e) { + return new KillResult( + true, "Failed to query jobInfo" + ExceptionUtils.getStackTrace(e), new HashMap<>()); + } + if (jobInfoResult.getUser() == null || jobInfoResult.getJobID() == null) { + return new KillResult(false, "user or jobID is null", new HashMap<>()); + } + if (jobInfoResult.getJobStatus() == null) { + return new KillResult(false, "jobStatus is null", new HashMap<>()); + } + if (jobInfoResult.getJobStatus().isJobCancelled()) { + String msg = "Kill job aborted: Job has already been canceled."; + return new KillResult(false, msg, new 
HashMap<>()); + } else if (jobInfoResult.getJobStatus().isJobFinishedState()) { + String msg = "Kill job aborted: Job is already in finished-state(SUCCEED/FAILED)."; + return new KillResult(false, msg, new HashMap<>()); + // throw new LinkisClientExecutionException(JobStatus.FAILED, "EXE0004", + // ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); + } else { + try { + LinkisOperResultAdapter jobKillResult = + oper.kill( + jobInfoResult.getUser(), + jobInfoResult.getJobID(), + jobInfoResult.getStrongerExecId()); + } catch (Exception e) { + return new KillResult( + false, + "Exception thrown when trying to send kill request. Messgae: " + + ExceptionUtils.getStackTrace(e), + new HashMap<>()); + } + String msg = "Kill request has been sent"; + LoggerManager.getPlaintTextLogger().info(msg); + int retryCnt = 0; + final int MAX_RETRY = 5 * 6; + while (!jobInfoResult.getJobStatus().isJobFinishedState() + && !jobInfoResult.getJobStatus().isJobCancelled()) { + CliUtils.doSleepQuietly(CliConstants.JOB_QUERY_SLEEP_MILLS); + try { + jobInfoResult = oper.queryJobInfo(jobInfoResult.getUser(), jobInfoResult.getJobID()); + retryCnt = 0; // if exception then will not go here + } catch (Exception e) { + retryCnt++; + CliUtils.doSleepQuietly(5 * CliConstants.JOB_QUERY_SLEEP_MILLS); + if (retryCnt >= MAX_RETRY) { + return new KillResult( + false, + MessageFormat.format( + "After send kill. Client cannot get jobStatus from server continuously for {0} seconds. Client aborted. Assume kill failed! 
Error message: \n", + MAX_RETRY * 5 * CliConstants.JOB_QUERY_SLEEP_MILLS), + new HashMap<>()); + } + } + } + if (jobInfoResult.getJobStatus().isJobFinishedState() + && !jobInfoResult.getJobStatus().isJobCancelled()) { + msg = "Kill Failed: Job Current status: " + jobInfoResult.getJobStatus(); + return new KillResult(false, msg, new HashMap<>()); + // throw new LinkisClientExecutionException(JobStatus.FAILED, + // "EXE0004", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg); + } else if (jobInfoResult.getJobStatus().isJobCancelled()) { + msg = + MessageFormat.format( + "Kill successful: jobId={0}, status={1}.", + jobInfoResult.getJobID(), jobInfoResult.getJobStatus()); + return new KillResult(true, msg, new HashMap<>()); + // LogUtils.getPlaintTextLogger().info(msg); + } else { + return new KillResult(false, "Unknown Error!!", new HashMap<>()); + } + } + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/builder/ProcessKeyUtils.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/KeyParser.java similarity index 85% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/builder/ProcessKeyUtils.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/KeyParser.java index 9527a8f513..01295ce676 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/builder/ProcessKeyUtils.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/KeyParser.java @@ -15,27 +15,27 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.application.interactor.job.builder; +package org.apache.linkis.cli.application.interactor.job.common; -import org.apache.linkis.cli.application.constants.AppKeys; +import org.apache.linkis.cli.application.constants.CliKeys; import org.apache.commons.lang3.StringUtils; import java.util.HashMap; import java.util.Map; -public class ProcessKeyUtils { +public class KeyParser { public static Map removePrefixForKeysInMap(Map map) { final String[] PREFIX = new String[] { - AppKeys.JOB_PARAM_CONF, - AppKeys.JOB_PARAM_RUNTIME, - AppKeys.JOB_PARAM_VAR, - AppKeys.JOB_EXEC, - AppKeys.JOB_SOURCE, - AppKeys.JOB_LABEL, - AppKeys.JOB_CONTENT + CliKeys.JOB_PARAM_CONF, + CliKeys.JOB_PARAM_RUNTIME, + CliKeys.JOB_PARAM_VAR, + CliKeys.JOB_EXEC, + CliKeys.JOB_SOURCE, + CliKeys.JOB_LABEL, + CliKeys.JOB_CONTENT }; for (String prefix : PREFIX) { map = removePrefixForKeysInMap(map, prefix); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/KillResult.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/KillResult.java new file mode 100644 index 0000000000..732cd00ec7 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/KillResult.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.common; + +import org.apache.linkis.cli.application.entity.job.JobResult; + +import java.util.Map; + +public class KillResult implements JobResult { + private Boolean success; + private String message; + private Map extraMessage; + + public KillResult(Boolean success, String message, Map extraMessage) { + this.success = success; + this.message = message; + this.extraMessage = extraMessage; + } + + @Override + public Boolean isSuccess() { + return success; + } + + @Override + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } + + @Override + public Map getExtraMessage() { + return extraMessage; + } + + public void setExtraMessage(Map extraMessage) { + this.extraMessage = extraMessage; + } + + public void setSuccess(Boolean success) { + this.success = success; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisJobStatus.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/LinkisJobStatus.java similarity index 97% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisJobStatus.java rename to 
linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/LinkisJobStatus.java index 9b1435da2a..dd4df9ee9b 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisJobStatus.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/LinkisJobStatus.java @@ -15,10 +15,10 @@ * limitations under the License. */ -package org.apache.linkis.cli.application.interactor.job; +package org.apache.linkis.cli.application.interactor.job.common; +import org.apache.linkis.cli.application.entity.job.JobStatus; import org.apache.linkis.cli.application.operator.once.LinkisNodeStatus; -import org.apache.linkis.cli.common.entity.job.JobStatus; import org.apache.commons.lang3.StringUtils; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/LogData.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/LogData.java new file mode 100644 index 0000000000..5397bd04f1 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/LogData.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.common; + +import org.apache.linkis.cli.application.operator.ujes.LinkisOperResultAdapter; + +import java.util.LinkedList; +import java.util.List; +import java.util.concurrent.LinkedBlockingDeque; + +public class LogData { + private String user; + private String jobId; + private String execId; + // log will be fetched and stored in LinkedBlockingDeque, while logRetriever consumes log in + // another thread + private LinkedBlockingDeque logBuffer = new LinkedBlockingDeque(); + private String logPath; // remote path for job log + private Integer nextLogLineIdx; // index of next log line to be fetched + private Boolean hasNextLogLine; // if there is still log to be retrieve + private volatile Boolean logFin = false; // if all log is finished + + public LogData(String user, String jobId, String execId) { + this.user = user; + this.jobId = jobId; + this.execId = execId; + } + + public String getUser() { + return user; + } + + public String getJobID() { + return jobId; + } + + public String getExecID() { + return execId; + } + + public final String getLogPath() { + return logPath; + } + + public final void setLogPath(String logPath) { + this.logPath = logPath; + } + + public String consumeLog() { + List logs = new LinkedList<>(); + this.logBuffer.drainTo(logs, this.logBuffer.size()); + StringBuilder tmp = new StringBuilder(); + for (String str : logs) { + tmp.append(str); + } + return tmp.toString(); + } + + public void appendLog(String log) { + this.logBuffer.add(log); + } + + public Integer 
getNextLogLineIdx() { + return nextLogLineIdx; + } + + public void setNextLogLineIdx(Integer nextLogLineIdx) { + this.nextLogLineIdx = nextLogLineIdx; + } + + public Boolean hasNextLogLine() { + return hasNextLogLine; + } + + public void setHasNextLogLine(Boolean hasNextLogLine) { + this.hasNextLogLine = hasNextLogLine; + } + + public void updateLog(LinkisOperResultAdapter adapter) { + if (adapter.getLogPath() != null) { + setLogPath(adapter.getLogPath()); + } + if (adapter.getLog() != null + && adapter.getNextLogLine() != null + && adapter.hasNextLogLine() != null) { + setNextLogLineIdx(adapter.getNextLogLine()); + setHasNextLogLine(adapter.hasNextLogLine()); + appendLog(adapter.getLog()); + } + } + + public void setLogFin() { + this.logFin = true; + } + + public Boolean isLogFin() { + return logFin; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/LogRetriever.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/LogRetriever.java new file mode 100644 index 0000000000..33943f1748 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/LogRetriever.java @@ -0,0 +1,202 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.common; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.exception.LinkisClientExecutionException; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.observer.event.LinkisClientEvent; +import org.apache.linkis.cli.application.observer.event.LogStartEvent; +import org.apache.linkis.cli.application.observer.listener.LinkisClientListener; +import org.apache.linkis.cli.application.operator.ujes.LinkisJobOper; +import org.apache.linkis.cli.application.operator.ujes.LinkisOperResultAdapter; +import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.cli.application.utils.LoggerManager; +import org.apache.linkis.cli.application.utils.SchedulerManager; + +import java.text.MessageFormat; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class LogRetriever { + private static final Logger logger = LoggerFactory.getLogger(LogRetriever.class); + + private LinkisJobOper linkisJobOperator; + private LogData logData; + + private Boolean incLogMode; + + private LinkisClientListener logListener; + private LinkisClientEvent logStartEvent = new LogStartEvent(); + + public LogRetriever( + String user, + String jobId, + String execId, + Boolean incLogMode, + LinkisJobOper linkisJobOperator, + 
LinkisClientListener logListener) { + this.linkisJobOperator = linkisJobOperator; + this.logListener = logListener; + this.incLogMode = incLogMode; + this.logData = new LogData(user, jobId, execId); + registerLogListener(logListener); + } + + public void retrieveLogAsync() { + if (logData.getUser() == null || logData.getJobID() == null) { + throw new LinkisClientExecutionException( + "EXE0036", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "user or jobID is null"); + } + try { + Thread logConsumer = new Thread(() -> notifyLogListener(), "Log-Consumer"); + Thread logRetriever = new Thread(() -> queryLogLoop(logData), "Log-Retriever"); + SchedulerManager.getCachedThreadPoolExecutor().execute(logRetriever); + SchedulerManager.getCachedThreadPoolExecutor().execute(logConsumer); + } catch (Exception e) { + logger.warn("Failed to retrieve log", e); + } + } + + public void waitIncLogComplete() { + int retry = 0; + int MAX_RETRY = 300; // wait for 10 minutes after job finish + while (retry++ < MAX_RETRY) { + if (this.logFinReceived()) { + return; + } + CliUtils.doSleepQuietly(CliConstants.JOB_QUERY_SLEEP_MILLS); + } + String msg = + "Job is in Finished state(SUCCEED/FAILED/CANCELED) but client keep querying inclog for " + + (MAX_RETRY * CliConstants.JOB_QUERY_SLEEP_MILLS / 1000) + + "seconds. Execution ends forcefully. Next will try handle execution result."; + logger.warn(msg); + LoggerManager.getInformationLogger().warn(msg); + } + + public void queryLogLoop(LogData data) { + int curLogIdx; + int nextLogIdx; + boolean hasNext = true; + int retryCnt = 0; + final int MAX_RETRY = 12; // continues fails for 90s, then exit thread + try { + while (hasNext) { + curLogIdx = data.getNextLogLineIdx() == null ? 
0 : data.getNextLogLineIdx(); + try { + queryJobLogFromLine(data, curLogIdx); + } catch (Exception e) { + logger.error("Cannot get inc-log:", e); + // and yes sometimes server may not be able to prepare persisted-log + retryCnt++; + if (retryCnt >= MAX_RETRY) { + logger.error( + "Continuously failing to query inc-log for " + + MAX_RETRY * (MAX_RETRY + 2) * 500 / 1000 + + "s. Will no longer try to query log", + e); + break; + } + CliUtils.doSleepQuietly(500l + 500l * retryCnt); // maybe server problem. sleep longer + continue; + } + retryCnt = 0; + nextLogIdx = data.getNextLogLineIdx() == null ? curLogIdx : data.getNextLogLineIdx(); + if (incLogMode) { + hasNext = data.hasNextLogLine() == null ? curLogIdx < nextLogIdx : data.hasNextLogLine(); + } else { + hasNext = curLogIdx < nextLogIdx; + } + if (curLogIdx >= nextLogIdx) { + String msg = + MessageFormat.format( + "Retrieving log, hasNext={0}, nextLogIdx={1}", hasNext, nextLogIdx); + logger.info(msg); + } + CliUtils.doSleepQuietly(CliConstants.JOB_QUERY_SLEEP_MILLS); + } + } catch (Exception e) { + logger.error("Something goes wrong. Job Log may be incomplete", e); + } finally { + sendLogFin(); + } + } + + private void queryJobLogFromLine(LogData data, int fromLine) throws LinkisClientRuntimeException { + + LinkisOperResultAdapter jobInfoResult = + linkisJobOperator.queryJobInfo(data.getUser(), data.getJobID()); + data.updateLog(jobInfoResult); + if (!jobInfoResult.getJobStatus().isJobFinishedState()) { + try { + data.updateLog( + linkisJobOperator.queryRunTimeLogFromLine( + data.getUser(), data.getJobID(), data.getExecID(), fromLine)); + } catch (Exception e) { + // job is finished while we start query log(but request is not send). + // then probably server cache is gone and we got a exception here. + // however we cannot know if this happens based on the exception message + logger.warn( + "Caught exception when querying runtime-log. Probably server-side has close stream. 
Will try openLog api if Job is completed.", + e); + if (jobInfoResult.getJobStatus().isJobFinishedState()) { + CliUtils.doSleepQuietly(500l); + data.updateLog( + linkisJobOperator.queryPersistedLogFromLine( + data.getLogPath(), data.getUser(), data.getJobID(), fromLine)); + } + } + } else { + try { + data.updateLog( + linkisJobOperator.queryPersistedLogFromLine( + data.getLogPath(), data.getUser(), data.getJobID(), fromLine)); + } catch (Exception e) { + logger.error("Cannot get persisted-inc-log:", e); + // and yes sometimes server may not be able to prepare persisted-log + throw e; + } + } + } + + public Boolean isIncLogMode() { + return incLogMode; + } + + public void registerLogListener(LinkisClientListener observer) { + this.logStartEvent.register(observer); + } + + public void notifyLogListener() { + if (this.logStartEvent.isRegistered()) { + logStartEvent.notifyObserver(logStartEvent, this.logData); + } + } + + public void sendLogFin() { + this.logData.setLogFin(); + } + + public boolean logFinReceived() { + return this.logData.isLogFin(); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/ResultData.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/ResultData.java new file mode 100644 index 0000000000..8c9bec028b --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/ResultData.java @@ -0,0 +1,165 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.common; + +import org.apache.linkis.cli.application.entity.job.JobStatus; +import org.apache.linkis.cli.application.operator.ujes.LinkisOperResultAdapter; + +import java.util.LinkedList; +import java.util.List; +import java.util.concurrent.LinkedBlockingDeque; + +public class ResultData { + + private final String user; + private final String jobId; + private final String execId; + private final LinkedBlockingDeque resultContent = new LinkedBlockingDeque<>(); + private String extraMessage; + private JobStatus jobStatus = null; + private String resultLocation; + private String[] resultSetPaths = null; // remote paths for job result set + private Boolean hasNextResultPage; + private Integer errCode = null; + private String errDesc = null; + private boolean hasResult = true; + + private volatile Boolean resultFin = false; + + public ResultData(String user, String jobId, String execId) { + this.user = user; + this.jobId = jobId; + this.execId = execId; + } + + public String getJobID() { + return jobId; + } + + public String getUser() { + return user; + } + + public final String getExecID() { + return execId; + } + + public final String getResultLocation() { + return resultLocation; + } + + public final void setResultLocation(String resultLocation) { + this.resultLocation = resultLocation; + } + + public String[] getResultSetPaths() { + return resultSetPaths; + } + + public final void setResultSetPaths(String[] resultSetPaths) { + this.resultSetPaths = resultSetPaths; + } + + public Integer 
getErrCode() { + return errCode; + } + + public void setErrCode(Integer errCode) { + this.errCode = errCode; + } + + public String getErrDesc() { + return errDesc; + } + + public void setErrDesc(String errDesc) { + this.errDesc = errDesc; + } + + public List consumeResultContent() { + List ret = new LinkedList<>(); + resultContent.drainTo(ret, resultContent.size()); + return ret; + } + + public void appendResultContent(ResultSet resultContent) { + this.resultContent.add(resultContent); + } + + public Boolean hasNextResultPage() { + return hasNextResultPage; + } + + public void setHasNextResultPage(Boolean hasNextResultPage) { + this.hasNextResultPage = hasNextResultPage; + } + + public void setResultFin() { + this.resultFin = true; + } + + public boolean isResultFin() { + return this.resultFin; + } + + public boolean hasResult() { + return hasResult; + } + + public void setHasResult(boolean hasResult) { + this.hasResult = hasResult; + } + + public JobStatus getJobStatus() { + return jobStatus; + } + + public void setJobStatus(JobStatus jobStatus) { + this.jobStatus = jobStatus; + } + + public String getExtraMessage() { + return extraMessage; + } + + public void setExtraMessage(String extraMessage) { + this.extraMessage = extraMessage; + } + + public void updateByOperResult(LinkisOperResultAdapter adapter) { + if (adapter.getResultLocation() != null) { + setResultLocation(adapter.getResultLocation()); + } + if (adapter.getResultSetPaths() != null) { + setResultSetPaths(adapter.getResultSetPaths()); + } + if (adapter.getErrCode() != null) { + setErrCode(adapter.getErrCode()); + } + if (adapter.getErrDesc() != null) { + setErrDesc(adapter.getErrDesc()); + } + if (adapter.getResultContent() != null && adapter.resultHasNextPage() != null) { + setHasNextResultPage(adapter.resultHasNextPage()); + appendResultContent(adapter.getResultContent()); + } + if (adapter.getJobStatus() != null) { + setJobStatus(adapter.getJobStatus()); + } + } +} diff --git 
a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/ResultRetriever.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/ResultRetriever.java new file mode 100644 index 0000000000..9997dd1f41 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/ResultRetriever.java @@ -0,0 +1,186 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.interactor.job.common; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.exception.LinkisClientExecutionException; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.observer.event.FetchResultEvent; +import org.apache.linkis.cli.application.observer.event.LinkisClientEvent; +import org.apache.linkis.cli.application.observer.listener.LinkisClientListener; +import org.apache.linkis.cli.application.operator.ujes.LinkisJobOper; +import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.cli.application.utils.LoggerManager; +import org.apache.linkis.cli.application.utils.SchedulerManager; + +import org.apache.commons.lang3.StringUtils; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class ResultRetriever { + private static final Logger logger = LoggerFactory.getLogger(ResultRetriever.class); + + private LinkisJobOper linkisJobOperator; + private ResultData resultData; + + private LinkisClientListener resultListener; + private LinkisClientEvent fetchResultEvent = new FetchResultEvent(); + + public ResultRetriever( + String user, + String jobId, + String execId, + LinkisJobOper linkisJobOperator, + LinkisClientListener resultListener) { + this.linkisJobOperator = linkisJobOperator; + this.resultListener = resultListener; + this.resultData = new ResultData(user, jobId, execId); + registerResultListener(resultListener); + } + + public void retrieveResultSync() { + if (resultData.getUser() == null || resultData.getJobID() == null) { + throw new LinkisClientExecutionException( + "EXE0036", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "user or jobID is null"); + } + resultData.updateByOperResult( + 
linkisJobOperator.queryJobInfo(resultData.getUser(), resultData.getJobID())); + if (resultData.getJobStatus() == null) { + throw new LinkisClientExecutionException( + "EXE0038", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, "jobStatus is null"); + } + if (!resultData.getJobStatus().isJobSuccess()) { + LoggerManager.getInformationLogger() + .info( + "Job status is not success but \'" + + resultData.getJobStatus() + + "\'. Will not try to retrieve any Result"); + resultData.setResultFin(); // inform listener to stop + return; + } + if (StringUtils.isBlank(resultData.getResultLocation())) { + throw new LinkisClientExecutionException( + "EXE0037", + ErrorLevel.WARN, + CommonErrMsg.ExecutionErr, + "Got blank ResultLocation from server. Job may not have result-set. Will not try to retrieve any Result"); + } + resultData.updateByOperResult( + linkisJobOperator.queryResultSetPaths( + resultData.getUser(), resultData.getJobID(), resultData.getResultLocation())); + + if (resultData.getResultSetPaths() == null || resultData.getResultSetPaths().length == 0) { + String msg = "Your job got no result."; + logger.warn(msg); + resultData.setResultFin(); // inform listener to stop + resultData.setHasResult(false); + return; + } + + try { + resultData.setHasResult(true); + // Thread resultConsumer = new Thread(() -> notifyResultListener()); + Thread resultThread = new Thread(() -> queryResultLoop(resultData), "Result-Retrieve-Thread"); + // SchedulerUtils.getCachedThreadPoolExecutor().execute(resultConsumer); + SchedulerManager.getCachedThreadPoolExecutor().execute(resultThread); + notifyResultListener(); + } catch (Exception e) { + logger.error("Failed to retrieve result", e); + throw e; + } + } + + public void queryResultLoop(ResultData data) { + boolean hasNext = true; + int retryCnt = 0; + final int MAX_RETRY = 30; // continues fails for 250s, then exit + int idx = 0; + try { + while (hasNext) { + try { + hasNext = queryOneResult(data, idx); + } catch (LinkisClientRuntimeException 
e) { + logger.error("Cannot get result:", e); + retryCnt++; + if (retryCnt >= MAX_RETRY) { + logger.error( + "Continuously failing to query result for " + + MAX_RETRY * (MAX_RETRY + 2) * 500 / 1000 + + "s. Will no longer try to query result", + e); + return; + } else { + hasNext = true; + } + CliUtils.doSleepQuietly(500l + 500l * retryCnt); // maybe server problem. sleep longer + continue; + } + idx++; + } + } catch (Exception e) { + logger.error("Something goes wrong. Job Result may be incomplete", e); + throw e; + } finally { + data.setResultFin(); + } + } + + private boolean queryOneResult(ResultData data, int idxResultSet) { + Integer curPage = 1; + boolean hasNextResult = true; + boolean hasNextPage = true; + while (hasNextPage) { + data.updateByOperResult( + linkisJobOperator.queryResultSetGivenResultSetPath( + data.getResultSetPaths(), + idxResultSet, + data.getUser(), + curPage, + CliConstants.RESULTSET_PAGE_SIZE)); + if (data.hasNextResultPage() == null) { + throw new LinkisClientExecutionException( + "EXE0040", + ErrorLevel.ERROR, + CommonErrMsg.ExecutionResultErr, + "Something foes wrong. 
Got null as \'hasNextPage\'."); + } + hasNextPage = data.hasNextResultPage(); + + curPage++; + hasNextResult = idxResultSet + 1 < data.getResultSetPaths().length; + } + return hasNextResult; + } + + public void registerResultListener(LinkisClientListener observer) { + this.fetchResultEvent.register(observer); + } + + public void notifyResultListener() { + if (this.fetchResultEvent.isRegistered()) { + fetchResultEvent.notifyObserver(fetchResultEvent, this.resultData); + } + } + + public void setResultFin() { + this.resultData.setResultFin(); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisResultSet.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/ResultSet.java similarity index 88% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisResultSet.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/ResultSet.java index 4fa1dcfe02..5821e14a55 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisResultSet.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/common/ResultSet.java @@ -15,18 +15,18 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.application.interactor.job.data; +package org.apache.linkis.cli.application.interactor.job.common; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; -public class LinkisResultSet implements Cloneable { +public class ResultSet implements Cloneable { private int resultsetIdx; private List> resultMeta; private List> content; - public LinkisResultSet() {} + public ResultSet() {} public int getResultsetIdx() { return resultsetIdx; @@ -53,8 +53,8 @@ public void setContent(List> content) { } @Override - protected LinkisResultSet clone() throws CloneNotSupportedException { - LinkisResultSet ret = new LinkisResultSet(); + protected ResultSet clone() throws CloneNotSupportedException { + ResultSet ret = new ResultSet(); if (this.resultMeta != null) { List> resultMeta = null; ret.resultMeta = new LinkedList<>(); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/help/HelpJob.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/help/HelpJob.java new file mode 100644 index 0000000000..943c54ed63 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/help/HelpJob.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.help; + +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.job.Job; +import org.apache.linkis.cli.application.entity.job.JobResult; +import org.apache.linkis.cli.application.present.HelpPresenter; +import org.apache.linkis.cli.application.present.model.HelpInfoModel; + +import java.util.HashMap; +import java.util.Map; + +public class HelpJob implements Job { + private CliCtx ctx; + + @Override + public void build(CliCtx ctx) { + this.ctx = ctx; + } + + @Override + public JobResult run() { + HelpInfoModel model = new HelpInfoModel(); + model.buildModel(ctx.getTemplate()); + new HelpPresenter().present(model); + return new JobResult() { + @Override + public Boolean isSuccess() { + return true; + } + + @Override + public String getMessage() { + return ""; + } + + @Override + public Map getExtraMessage() { + return new HashMap<>(); + } + }; + } + + @Override + public void onDestroy() {} +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJob.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJob.java new file mode 100644 index 0000000000..d4d02a4e9c --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJob.java @@ -0,0 +1,263 
@@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.interactive; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.job.Job; +import org.apache.linkis.cli.application.entity.job.JobResult; +import org.apache.linkis.cli.application.exception.LinkisClientExecutionException; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.job.common.JobKiller; +import org.apache.linkis.cli.application.interactor.job.common.LogRetriever; +import org.apache.linkis.cli.application.interactor.job.common.ResultRetriever; +import org.apache.linkis.cli.application.operator.OperManager; +import org.apache.linkis.cli.application.operator.ujes.LinkisJobOper; +import org.apache.linkis.cli.application.operator.ujes.LinkisOperResultAdapter; +import 
org.apache.linkis.cli.application.present.LogPresenter; +import org.apache.linkis.cli.application.present.ResultPresenter; +import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.cli.application.utils.LoggerManager; + +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.exception.ExceptionUtils; + +import java.util.HashMap; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class InteractiveJob implements Job { + + private static final Logger logger = LoggerFactory.getLogger(InteractiveJob.class); + + private CliCtx ctx; + + private Boolean isAsync = false; + + private LinkisJobOper oper; + + private InteractiveJobDesc desc; + + private String username; + + private String jobId; + + @Override + public void build(CliCtx ctx) { + this.ctx = ctx; + this.isAsync = + ctx.getVarAccess().getVarOrDefault(Boolean.class, CliKeys.LINKIS_CLIENT_ASYNC_OPT, false); + this.desc = InteractiveJobDescBuilder.build(ctx); + this.oper = (LinkisJobOper) OperManager.getNew(CliKeys.Linkis_OPER, ctx); + } + + @Override + public JobResult run() { + + // Indicator + StringBuilder infoBuilder = new StringBuilder(); + infoBuilder.append("connecting to linkis gateway:").append(oper.getServerUrl()); + LoggerManager.getInformationLogger().info(infoBuilder.toString()); + infoBuilder.setLength(0); + + // Submit + LinkisOperResultAdapter submitResult = oper.submit(desc); + CliUtils.doSleepQuietly(CliConstants.JOB_QUERY_SLEEP_MILLS); + + // JobInfo + LinkisOperResultAdapter jobInfoResult = + oper.queryJobInfo(submitResult.getUser(), submitResult.getJobID()); + oper.queryJobStatus( + jobInfoResult.getUser(), jobInfoResult.getJobID(), jobInfoResult.getStrongerExecId()); + infoBuilder.setLength(0); + infoBuilder + .append("JobId:") + .append(jobInfoResult.getJobID()) + .append(System.lineSeparator()) + .append("TaskId:") + .append(jobInfoResult.getJobID()) + .append(System.lineSeparator()) + 
.append("ExecId:") + .append(jobInfoResult.getStrongerExecId()); + LoggerManager.getPlaintTextLogger().info(infoBuilder.toString()); + infoBuilder.setLength(0); + + // Submit success or not + if (!jobInfoResult.getJobStatus().isJobSubmitted()) { + return new InteractiveJobResult(false, "Failed to submit job", new HashMap<>()); + } else { + // Output that job is submitted + infoBuilder.append("Job is successfully submitted!").append(System.lineSeparator()); + LoggerManager.getInformationLogger().info(infoBuilder.toString()); + infoBuilder.setLength(0); + username = submitResult.getUser(); + jobId = submitResult.getJobID(); + } + + // async job, return + if (isAsync) { + return new InteractiveJobResult( + submitResult.getJobStatus().isJobSubmitted(), + "Async Submission Success", + new HashMap<>()); + } + + CliUtils.doSleepQuietly(2000l); + + // get log while running + LogRetriever logRetriever = + new LogRetriever( + jobInfoResult.getUser(), + jobInfoResult.getJobID(), + jobInfoResult.getStrongerExecId(), + true, + oper, + new LogPresenter()); + // async because we need to query job status + logRetriever.retrieveLogAsync(); + + // wait complete + jobInfoResult = waitJobComplete(submitResult.getUser(), submitResult.getJobID()); + logRetriever.waitIncLogComplete(); + + // get result-set + String outputPath = + ctx.getVarAccess().getVar(String.class, CliKeys.LINKIS_CLIENT_COMMON_OUTPUT_PATH); + ResultPresenter presenter; + if (StringUtils.isBlank(outputPath)) { + presenter = new ResultPresenter(); + } else { + presenter = new ResultPresenter(true, outputPath); + } + + ResultRetriever resultRetriever = + new ResultRetriever( + jobInfoResult.getUser(), + jobInfoResult.getJobID(), + jobInfoResult.getStrongerExecId(), + oper, + presenter); + + JobResult result = getResult(jobInfoResult, resultRetriever); + + return result; + } + + private JobResult getResult( + LinkisOperResultAdapter jobInfoResult, ResultRetriever resultRetriever) + throws LinkisClientRuntimeException { + 
if (!jobInfoResult.getJobStatus().isJobSuccess()) { + LoggerManager.getInformationLogger() + .info( + "Job status is not success but \'" + + jobInfoResult.getJobStatus() + + "\'. Will not try to retrieve any Result"); + Map extraMap = new HashMap<>(); + if (jobInfoResult.getErrCode() != null) { + extraMap.put("errorCode", String.valueOf(jobInfoResult.getErrCode())); + } + if (StringUtils.isNotBlank(jobInfoResult.getErrDesc())) { + extraMap.put("errorDesc", jobInfoResult.getErrDesc()); + } + return new InteractiveJobResult(false, "Execute Error!!!", extraMap); + } + InteractiveJobResult result = + new InteractiveJobResult(true, "Execute Success!!!", new HashMap<>()); + try { + resultRetriever.retrieveResultSync(); + result.setSuccess(true); + result.setMessage("execute success!!!"); + } catch (LinkisClientExecutionException e) { + if (e.getCode().equals("EXE0037")) { + result.setSuccess(true); + result.setMessage("execute success!!!"); + LoggerManager.getInformationLogger().warn(e.getMessage()); + } else { + result.setSuccess(false); + result.setMessage("execute failed!!!\n" + ExceptionUtils.getStackTrace(e)); + } + resultRetriever.setResultFin(); // inform listener to stop + } catch (Exception e) { + result.setSuccess(false); + result.setMessage("execute failed!!!\n" + ExceptionUtils.getStackTrace(e)); + resultRetriever.setResultFin(); // inform listener to stop + } + return result; + } + + private LinkisOperResultAdapter waitJobComplete(String user, String jobId) + throws LinkisClientRuntimeException { + int retryCnt = 0; + final int MAX_RETRY = 30; + + LinkisOperResultAdapter jobInfoResult = oper.queryJobInfo(user, jobId); + oper.queryJobStatus(user, jobId, jobInfoResult.getStrongerExecId()); + + while (!jobInfoResult.getJobStatus().isJobFinishedState()) { + // query progress + try { + jobInfoResult = oper.queryJobInfo(user, jobId); + oper.queryJobStatus(user, jobId, jobInfoResult.getStrongerExecId()); + } catch (Exception e) { + logger.warn("", e); + retryCnt++; 
+ if (retryCnt >= MAX_RETRY) { + throw new LinkisClientExecutionException( + "EXE0013", + ErrorLevel.ERROR, + CommonErrMsg.ExecutionErr, + "Cannot get jobStatus from server continuously for {0} seconds. Client aborted! Error message: \n", + MAX_RETRY * 5 * CliConstants.JOB_QUERY_SLEEP_MILLS, + e); + } + CliUtils.doSleepQuietly( + 5 * CliConstants.JOB_QUERY_SLEEP_MILLS); // maybe server problem. sleep + // longer + continue; + } + retryCnt = 0; // reset counter + if (jobInfoResult.getJobStatus().isJobAbnormalStatus()) { + throw new LinkisClientExecutionException( + "EXE0006", + ErrorLevel.ERROR, + CommonErrMsg.ExecutionErr, + "Job is in abnormal status: " + CliUtils.GSON.toJson(jobInfoResult)); + } + CliUtils.doSleepQuietly(CliConstants.JOB_QUERY_SLEEP_MILLS); + } + return jobInfoResult; + } + + @Override + public void onDestroy() { + if (StringUtils.isBlank(username) || StringUtils.isBlank(jobId)) { + logger.warn("Failed to kill job username or jobId is blank"); + return; + } + try { + new JobKiller(oper).doKill(username, jobId); + } catch (Exception e) { + logger.error("Failed to kill job", e); + } + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/desc/LinkisSubmitDesc.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDesc.java similarity index 93% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/desc/LinkisSubmitDesc.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDesc.java index 49cdd1a547..e594d9cc23 100644 --- 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/desc/LinkisSubmitDesc.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDesc.java @@ -15,13 +15,11 @@ * limitations under the License. */ -package org.apache.linkis.cli.application.interactor.job.desc; - -import org.apache.linkis.cli.common.entity.job.JobDescription; +package org.apache.linkis.cli.application.interactor.job.interactive; import java.util.Map; -public class LinkisSubmitDesc implements JobDescription { +public class InteractiveJobDesc { private String submitUser; private String proxyUser; private String creator; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDescBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDescBuilder.java new file mode 100644 index 0000000000..2b0b20188a --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDescBuilder.java @@ -0,0 +1,177 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.interactive; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.constants.LinkisKeys; +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.var.VarAccess; +import org.apache.linkis.cli.application.interactor.job.common.KeyParser; +import org.apache.linkis.cli.application.operator.ujes.LinkisJobOper; +import org.apache.linkis.cli.application.operator.ujes.UJESClientFactory; +import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.cli.application.utils.LoggerManager; + +import org.apache.commons.lang3.StringUtils; + +import java.util.*; + +public class InteractiveJobDescBuilder { + + public static InteractiveJobDesc build(CliCtx ctx) { + InteractiveJobDesc desc = new InteractiveJobDesc(); + + VarAccess stdVarAccess = ctx.getVarAccess(); + + Map confMap = stdVarAccess.getVar(Map.class, CliKeys.JOB_PARAM_CONF); + Map runtimeMap = stdVarAccess.getVar(Map.class, CliKeys.JOB_PARAM_RUNTIME); + Map varMap = stdVarAccess.getVar(Map.class, CliKeys.JOB_PARAM_VAR); + Map labelMap = stdVarAccess.getVar(Map.class, CliKeys.JOB_LABEL); + Map sourceMap = stdVarAccess.getVar(Map.class, CliKeys.JOB_SOURCE); + Map executionMap = stdVarAccess.getVar(Map.class, CliKeys.JOB_EXEC); + + confMap = confMap == null ? new HashMap<>() : confMap; + runtimeMap = runtimeMap == null ? 
new HashMap<>() : runtimeMap; + varMap = varMap == null ? new HashMap<>() : varMap; + labelMap = labelMap == null ? new HashMap<>() : labelMap; + sourceMap = sourceMap == null ? new HashMap<>() : sourceMap; + executionMap = executionMap == null ? new HashMap<>() : executionMap; + + /** remove key prefix of all keys in map type params. e.g. kv in confMap, labelMap etc. */ + confMap = KeyParser.removePrefixForKeysInMap(confMap); + runtimeMap = KeyParser.removePrefixForKeysInMap(runtimeMap); + labelMap = KeyParser.removePrefixForKeysInMap(labelMap); + sourceMap = KeyParser.removePrefixForKeysInMap(sourceMap); + executionMap = KeyParser.removePrefixForKeysInMap(executionMap); + + /** remove key prefix of non-map type params */ + for (String key : stdVarAccess.getAllVarKeys()) { + Object val = stdVarAccess.getVar(Object.class, key); + if (!(val instanceof Map) && val != null) { + // note that we allow it to overwrite existing values in map + if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_PARAM_CONF)) { + KeyParser.removePrefixAndPutValToMap(confMap, key, val, CliKeys.JOB_PARAM_CONF); + } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_PARAM_VAR)) { + KeyParser.removePrefixAndPutValToMap(varMap, key, val, CliKeys.JOB_PARAM_VAR); + } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_PARAM_RUNTIME)) { + KeyParser.removePrefixAndPutValToMap(runtimeMap, key, val, CliKeys.JOB_PARAM_RUNTIME); + } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_EXEC)) { + KeyParser.removePrefixAndPutValToMap(executionMap, key, val, CliKeys.JOB_EXEC); + } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_LABEL)) { + KeyParser.removePrefixAndPutValToMap(labelMap, key, val, CliKeys.JOB_LABEL); + } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_SOURCE)) { + KeyParser.removePrefixAndPutValToMap(sourceMap, key, val, CliKeys.JOB_SOURCE); + } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.LINKIS_CLIENT_COMMON)) { + // do nothing + } 
else { + // confMap.put(key, stdVarAccess.getVar(Object.class, key)); + } + } + } + + Boolean asyncSubmission = + stdVarAccess.getVarOrDefault(Boolean.class, CliKeys.LINKIS_CLIENT_ASYNC_OPT, false); + + String creator; + if (!asyncSubmission) { + creator = + stdVarAccess.getVarOrDefault( + String.class, CliKeys.JOB_COMMON_CREATOR, CliConstants.JOB_CREATOR_DEFAULT); + } else { + creator = + stdVarAccess.getVarOrDefault( + String.class, CliKeys.JOB_COMMON_CREATOR, CliConstants.JOB_CREATOR_ASYNC_DEFAULT); + } + String code = stdVarAccess.getVar(String.class, CliKeys.JOB_EXEC_CODE); + String engineType = stdVarAccess.getVar(String.class, CliKeys.JOB_LABEL_ENGINE_TYPE); + String runType = stdVarAccess.getVar(String.class, CliKeys.JOB_LABEL_CODE_TYPE); + String scriptPath = + stdVarAccess.getVarOrDefault(String.class, CliKeys.JOB_SOURCE_SCRIPT_PATH, "LinkisCli"); + + String osUser = System.getProperty(CliKeys.LINUX_USER_KEY); + String[] adminUsers = StringUtils.split(CliKeys.ADMIN_USERS, ','); + Set adminSet = new HashSet<>(); + for (String admin : adminUsers) { + adminSet.add(admin); + } + String submitUsr = CliUtils.getSubmitUser(stdVarAccess, osUser, adminSet); + String proxyUsr = CliUtils.getProxyUser(stdVarAccess, submitUsr, adminSet); + + String enableExecuteOnce = + stdVarAccess.getVarOrDefault(String.class, CliKeys.JOB_LABEL_EXECUTEONCE, "true"); + // default executeOnce-mode + if (Boolean.parseBoolean(enableExecuteOnce)) { + labelMap.put(LinkisKeys.KEY_EXECUTEONCE, ""); + } else { + labelMap.remove(LinkisKeys.KEY_EXECUTEONCE); + } + String codePath = stdVarAccess.getVar(String.class, CliKeys.JOB_COMMON_CODE_PATH); + Object extraArgsObj = stdVarAccess.getVar(Object.class, CliKeys.JOB_EXTRA_ARGUMENTS); + if (extraArgsObj != null + && extraArgsObj instanceof String[] + && StringUtils.isBlank(code) + && StringUtils.isBlank(codePath)) { + String[] extraArgs = (String[]) extraArgsObj; + codePath = extraArgs[0]; + if (extraArgs.length > 1) { + runtimeMap.put( + 
LinkisKeys.EXTRA_ARGUMENTS, Arrays.copyOfRange(extraArgs, 1, extraArgs.length)); + } + } + + if (StringUtils.isBlank(code) && StringUtils.isNotBlank(codePath)) { + try { + code = CliUtils.readFile(codePath); + } catch (Exception e) { + LoggerManager.getInformationLogger().error("Failed to read file", e); + throw e; + } + } + + executionMap.put(LinkisKeys.KEY_CODE, code); + labelMap.put(LinkisKeys.KEY_ENGINETYPE, engineType); + labelMap.put(LinkisKeys.KEY_CODETYPE, runType); + labelMap.put(LinkisKeys.KEY_USER_CREATOR, proxyUsr + "-" + creator); + sourceMap.put(LinkisKeys.KEY_SCRIPT_PATH, scriptPath); + if (ctx.getExtraMap().containsKey(CliKeys.VERSION)) { + sourceMap.put(LinkisKeys.CLI_VERSION, ctx.getExtraMap().get(CliKeys.VERSION)); + } + runtimeMap.put(LinkisKeys.KEY_HIVE_RESULT_DISPLAY_TBALE, true); + + desc.setCreator(creator); + desc.setParamConfMap(confMap); + desc.setParamRunTimeMap(runtimeMap); + desc.setParamVarsMap(varMap); + desc.setLabelMap(labelMap); + desc.setSourceMap(sourceMap); + desc.setExecutionMap(executionMap); + desc.setSubmitUser(submitUsr); + desc.setProxyUser(proxyUsr); + + return desc; + } + + public static LinkisJobOper generateOperator(CliCtx ctx) { + LinkisJobOper linkisJobOperator = new LinkisJobOper(); + linkisJobOperator.setUJESClient(UJESClientFactory.getReusable(ctx.getVarAccess())); + linkisJobOperator.setServerUrl( + ctx.getVarAccess().getVar(String.class, CliKeys.LINKIS_COMMON_GATEWAY_URL)); + return linkisJobOperator; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobResult.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobResult.java new file mode 100644 index 0000000000..92999169ed --- /dev/null +++ 
b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobResult.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.interactive; + +import org.apache.linkis.cli.application.entity.job.JobResult; + +import java.util.Map; + +public class InteractiveJobResult implements JobResult { + private Boolean success; + private String message; + private Map extraMessage; + + public InteractiveJobResult(Boolean success, String message, Map extraMessage) { + this.success = success; + this.message = message; + this.extraMessage = extraMessage; + } + + @Override + public Boolean isSuccess() { + return success; + } + + @Override + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } + + @Override + public Map getExtraMessage() { + return extraMessage; + } + + public void setExtraMessage(Map extraMessage) { + this.extraMessage = extraMessage; + } + + public void setSuccess(Boolean success) { + this.success = success; + } +} diff --git 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/desc/LinkisJobManDesc.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdDesc.java similarity index 82% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/desc/LinkisJobManDesc.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdDesc.java index da4f5d1c47..99f0365004 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/desc/LinkisJobManDesc.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdDesc.java @@ -15,18 +15,25 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.application.interactor.job.desc; - -import org.apache.linkis.cli.common.entity.job.JobDescription; +package org.apache.linkis.cli.application.interactor.job.jobcmd; import java.util.Map; -public class LinkisJobManDesc implements JobDescription { +public class JobCmdDesc { private String jobId; private String user; + private JobCmdSubType subType; private Map params; + public JobCmdSubType getSubType() { + return subType; + } + + public void setSubType(JobCmdSubType subType) { + this.subType = subType; + } + public String getJobID() { return jobId; } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdDescBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdDescBuilder.java new file mode 100644 index 0000000000..1e52e6e532 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdDescBuilder.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.interactor.job.jobcmd; + +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.utils.CliUtils; + +import org.apache.commons.lang3.StringUtils; + +import java.util.HashSet; +import java.util.Set; + +public class JobCmdDescBuilder { + public static JobCmdDesc build(CliCtx ctx) { + JobCmdDesc desc = new JobCmdDesc(); + String osUser = System.getProperty(CliKeys.LINUX_USER_KEY); + String[] adminUsers = StringUtils.split(CliKeys.ADMIN_USERS, ','); + Set adminSet = new HashSet<>(); + for (String admin : adminUsers) { + adminSet.add(admin); + } + String submitUsr = CliUtils.getSubmitUser(ctx.getVarAccess(), osUser, adminSet); + + JobCmdSubType subType = null; + + String jobId = null; + if (ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_KILL_OPT)) { + jobId = ctx.getVarAccess().getVar(String.class, CliKeys.LINKIS_CLIENT_KILL_OPT); + subType = JobCmdSubType.KILL; + } else if (ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_STATUS_OPT)) { + jobId = ctx.getVarAccess().getVar(String.class, CliKeys.LINKIS_CLIENT_STATUS_OPT); + subType = JobCmdSubType.STATUS; + } else if (ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_DESC_OPT)) { + jobId = ctx.getVarAccess().getVar(String.class, CliKeys.LINKIS_CLIENT_DESC_OPT); + subType = JobCmdSubType.DESC; + } else if (ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_LOG_OPT)) { + jobId = ctx.getVarAccess().getVar(String.class, CliKeys.LINKIS_CLIENT_LOG_OPT); + subType = JobCmdSubType.LOG; + } else if (ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_RESULT_OPT)) { + jobId = ctx.getVarAccess().getVar(String.class, CliKeys.LINKIS_CLIENT_RESULT_OPT); + subType = JobCmdSubType.RESULT; + } else if (ctx.getVarAccess().hasVar(CliKeys.LINKIS_CLIENT_LIST_OPT)) { + jobId = ctx.getVarAccess().getVar(String.class, CliKeys.LINKIS_CLIENT_LIST_OPT); + subType = JobCmdSubType.LIST; + } + 
desc.setSubType(subType); + desc.setJobId(jobId); + desc.setUser(submitUsr); + return desc; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdJob.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdJob.java new file mode 100644 index 0000000000..e39be38f42 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdJob.java @@ -0,0 +1,212 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.interactor.job.jobcmd; + +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.job.Job; +import org.apache.linkis.cli.application.entity.job.JobResult; +import org.apache.linkis.cli.application.exception.LinkisClientExecutionException; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.job.common.JobKiller; +import org.apache.linkis.cli.application.interactor.job.common.KillResult; +import org.apache.linkis.cli.application.interactor.job.common.LogRetriever; +import org.apache.linkis.cli.application.interactor.job.common.ResultRetriever; +import org.apache.linkis.cli.application.operator.OperManager; +import org.apache.linkis.cli.application.operator.ujes.LinkisJobOper; +import org.apache.linkis.cli.application.operator.ujes.LinkisOperResultAdapter; +import org.apache.linkis.cli.application.present.JobInfoPresenter; +import org.apache.linkis.cli.application.present.LogPresenter; +import org.apache.linkis.cli.application.present.ResultPresenter; +import org.apache.linkis.cli.application.present.model.LinkisJobInfoModel; +import org.apache.linkis.cli.application.utils.LoggerManager; + +import org.apache.commons.lang3.StringUtils; +import org.apache.commons.lang3.exception.ExceptionUtils; + +import java.util.HashMap; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class JobCmdJob implements Job { + private static final Logger logger = LoggerFactory.getLogger(JobCmdJob.class); + + protected CliCtx ctx; + + protected LinkisJobOper oper; + + protected JobCmdDesc desc; + + @Override + public void build(CliCtx ctx) { + this.ctx = ctx; + this.desc = 
JobCmdDescBuilder.build(ctx); + this.oper = (LinkisJobOper) OperManager.getNew(CliKeys.Linkis_OPER, ctx); + } + + @Override + public JobResult run() { + JobCmdSubType subType = desc.getSubType(); + if (!(subType instanceof JobCmdSubType)) { + throw new LinkisClientExecutionException( + "EXE0030", + ErrorLevel.ERROR, + CommonErrMsg.ExecutionErr, + "JobSubType is not instance of JobManSubType"); + } + JobCmdJobResult result = new JobCmdJobResult(true, "Execute Success!!!", new HashMap<>()); + + switch (subType) { + case STATUS: + try { + LinkisOperResultAdapter jobInfoResult = + oper.queryJobInfo(desc.getUser(), desc.getJobID()); + LinkisJobInfoModel model = new LinkisJobInfoModel(); + model.buildModel(jobInfoResult); + new JobInfoPresenter().present(model); + } catch (Exception e) { + result.setSuccess(false); + result.setMessage(ExceptionUtils.getStackTrace(e)); + } + if (!result.isSuccess()) { + LoggerManager.getPlaintTextLogger() + .error("Failed to get job-info. Message: " + result.getMessage()); + } + return result; + case LOG: + try { + // get log while running + LinkisOperResultAdapter jobInfoResult = + oper.queryJobInfo(desc.getUser(), desc.getJobID()); + LogRetriever logRetriever = + new LogRetriever( + jobInfoResult.getUser(), + jobInfoResult.getJobID(), + jobInfoResult.getStrongerExecId(), + false, + oper, + new LogPresenter()); + // async because we need to query job status + logRetriever.retrieveLogAsync(); + logRetriever.waitIncLogComplete(); + } catch (Exception e) { + result.setSuccess(false); + result.setMessage(ExceptionUtils.getStackTrace(e)); + } + if (!result.isSuccess()) { + LoggerManager.getInformationLogger() + .error("Failed to get log. 
Message: " + result.getMessage()); + } + return result; + case RESULT: + // get log while running + LinkisOperResultAdapter jobInfoResult = oper.queryJobInfo(desc.getUser(), desc.getJobID()); + // get result-set + String outputPath = + ctx.getVarAccess().getVar(String.class, CliKeys.LINKIS_CLIENT_COMMON_OUTPUT_PATH); + ResultPresenter presenter; + if (StringUtils.isBlank(outputPath)) { + presenter = new ResultPresenter(); + } else { + presenter = new ResultPresenter(true, outputPath); + } + + ResultRetriever resultRetriever = + new ResultRetriever( + jobInfoResult.getUser(), + jobInfoResult.getJobID(), + jobInfoResult.getStrongerExecId(), + oper, + presenter); + + result = getResult(resultRetriever); + if (!result.isSuccess()) { + LoggerManager.getInformationLogger() + .error("Failed to get result. Message: " + result.getMessage()); + } + return result; + case KILL: + JobKiller jobKiller = new JobKiller(oper); + KillResult killResult; + try { + killResult = jobKiller.doKill(desc.getUser(), desc.getJobID()); + } catch (Exception e) { + killResult = + new KillResult( + false, + "Failed to kill job. Message: " + ExceptionUtils.getStackTrace(e), + new HashMap<>()); + } + if (killResult.isSuccess()) { + LoggerManager.getPlaintTextLogger().info("Kill Success. Current job-info:"); + } else { + LoggerManager.getPlaintTextLogger() + .error("Kill Failed. 
Message: " + killResult.getMessage() + "\n Current job-info:"); + } + try { + LinkisOperResultAdapter jobInfoResult2 = + oper.queryJobInfo(desc.getUser(), desc.getJobID()); + LinkisJobInfoModel model = new LinkisJobInfoModel(); + model.buildModel(jobInfoResult2); + new JobInfoPresenter().present(model); + } catch (Exception e) { + LoggerManager.getInformationLogger().error("Failed to get jobInfo", e); + } + return new JobCmdJobResult( + killResult.isSuccess(), killResult.getMessage(), killResult.getExtraMessage()); + // case LIST: + // break; + // case JOB_DESC: + // break; + default: + return new JobCmdJobResult( + false, "JobSubType \"" + subType + "\" is not supported", new HashMap<>()); + } + } + + private JobCmdJobResult getResult(ResultRetriever resultRetriever) + throws LinkisClientRuntimeException { + JobCmdJobResult result = new JobCmdJobResult(true, "Execute Success!!!", new HashMap<>()); + try { + resultRetriever.retrieveResultSync(); + result.setSuccess(true); + result.setMessage("execute success!!!"); + } catch (LinkisClientExecutionException e) { + if (e.getCode().equals("EXE0037")) { + result.setSuccess(true); + result.setMessage("execute success!!!"); + LoggerManager.getInformationLogger().warn(e.getMessage()); + } else { + result.setSuccess(false); + result.setMessage("execute failed!!!\n" + ExceptionUtils.getStackTrace(e)); + } + resultRetriever.setResultFin(); // inform listener to stop + } catch (Exception e) { + result.setSuccess(false); + result.setMessage("execute failed!!!\n" + ExceptionUtils.getStackTrace(e)); + resultRetriever.setResultFin(); // inform listener to stop + } + return result; + } + + @Override + public void onDestroy() {} +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdJobResult.java 
b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdJobResult.java new file mode 100644 index 0000000000..c5b9b43535 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdJobResult.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.interactor.job.jobcmd; + +import org.apache.linkis.cli.application.entity.job.JobResult; + +import java.util.Map; + +public class JobCmdJobResult implements JobResult { + private Boolean success; + private String message; + private Map extraMessage; + + public JobCmdJobResult(Boolean success, String message, Map extraMessage) { + this.success = success; + this.message = message; + this.extraMessage = extraMessage; + } + + @Override + public Boolean isSuccess() { + return success; + } + + @Override + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } + + @Override + public Map getExtraMessage() { + return extraMessage; + } + + public void setExtraMessage(Map extraMessage) { + this.extraMessage = extraMessage; + } + + public void setSuccess(Boolean success) { + this.success = success; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/subtype/LinkisManSubType.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdSubType.java similarity index 81% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/subtype/LinkisManSubType.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdSubType.java index 5df4b07976..b2d298995a 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/subtype/LinkisManSubType.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/jobcmd/JobCmdSubType.java @@ -15,11 +15,9 @@ * 
limitations under the License. */ -package org.apache.linkis.cli.application.interactor.job.subtype; +package org.apache.linkis.cli.application.interactor.job.jobcmd; -import org.apache.linkis.cli.common.entity.job.JobSubType; - -public enum LinkisManSubType implements JobSubType { +public enum JobCmdSubType { KILL("kill"), LOG("log"), DESC("desc"), @@ -29,11 +27,10 @@ public enum LinkisManSubType implements JobSubType { private String name; - LinkisManSubType(String name) { + JobCmdSubType(String name) { this.name = name; } - @Override public String getName() { return this.name; } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/once/LinkisOnceJob.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/once/LinkisOnceJob.java new file mode 100644 index 0000000000..fac387998e --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/once/LinkisOnceJob.java @@ -0,0 +1,106 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.interactor.job.once; + +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.job.Job; +import org.apache.linkis.cli.application.entity.job.JobResult; +import org.apache.linkis.cli.application.entity.job.JobStatus; +import org.apache.linkis.cli.application.operator.OperManager; +import org.apache.linkis.cli.application.operator.once.OnceJobOper; +import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.cli.application.utils.LoggerManager; +import org.apache.linkis.cli.application.utils.SchedulerManager; + +import java.util.HashMap; +import java.util.concurrent.CountDownLatch; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class LinkisOnceJob implements Job { + + private static final Logger logger = LoggerFactory.getLogger(LinkisOnceJob.class); + + private Boolean isAsync = false; + private OnceJobOper oper; + + @Override + public void build(CliCtx ctx) { + this.isAsync = + ctx.getVarAccess().getVarOrDefault(Boolean.class, CliKeys.LINKIS_CLIENT_ASYNC_OPT, false); + oper = (OnceJobOper) OperManager.getNew(CliKeys.LINKIS_ONCE, ctx); + } + + @Override + public JobResult run() { + StringBuilder infoBuilder = new StringBuilder(); + infoBuilder.append("connecting to linkis gateway:").append(oper.getServerUrl()); + LoggerManager.getInformationLogger().info(infoBuilder.toString()); + + /** submit */ + oper.submit(); + JobStatus jobStatus = oper.getStatus(); + infoBuilder.setLength(0); + infoBuilder.append("JobId:").append(oper.getJobID()).append(System.lineSeparator()); + LoggerManager.getPlaintTextLogger().info(infoBuilder.toString()); + if (isAsync && jobStatus != null && jobStatus.isJobSubmitted()) { + return new OnceJobResult(true, "Submit Success!!!", new HashMap<>()); + } + + /** getLog */ + CountDownLatch latch = new CountDownLatch(1); + try { + 
Thread logConsumer = new Thread(() -> ProcessLog(latch), "Log-Consumer"); + SchedulerManager.getCachedThreadPoolExecutor().execute(logConsumer); + } catch (Exception e) { + logger.warn("Failed to retrieve log", e); + } + + /** wait complete */ + oper.waitForComplete(); + try { + latch.await(); + } catch (Exception e) { + // ignore + } + + JobStatus finalStatus = oper.getStatus(); + + if (finalStatus.isJobSuccess()) { + return new OnceJobResult(true, "Execute Success!!!", new HashMap<>()); + } else { + return new OnceJobResult(false, "Execute Failure!!!", new HashMap<>()); + } + } + + @Override + public void onDestroy() { + oper.kill(); + } + + private void ProcessLog(CountDownLatch latch) { + while (!oper.isLogFin()) { + String log = oper.getCurrentLog(); + LoggerManager.getPlaintTextLogger().info(log); + CliUtils.doSleepQuietly(2000l); + } + latch.countDown(); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/once/OnceJobResult.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/once/OnceJobResult.java new file mode 100644 index 0000000000..2a14f76a6f --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/once/OnceJobResult.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.once; + +import org.apache.linkis.cli.application.entity.job.JobResult; + +import java.util.Map; + +public class OnceJobResult implements JobResult { + private Boolean success; + private String message; + private Map extraMessage; + + public OnceJobResult(Boolean success, String message, Map extraMessage) { + this.success = success; + this.message = message; + this.extraMessage = extraMessage; + } + + @Override + public Boolean isSuccess() { + return success; + } + + @Override + public String getMessage() { + return message; + } + + public void setMessage(String message) { + this.message = message; + } + + @Override + public Map getExtraMessage() { + return extraMessage; + } + + public void setExtraMessage(Map extraMessage) { + this.extraMessage = extraMessage; + } + + public void setSuccess(Boolean success) { + this.success = success; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/version/VersionJob.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/version/VersionJob.java new file mode 100644 index 0000000000..599f97904f --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/version/VersionJob.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.interactor.job.version; + +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.job.Job; +import org.apache.linkis.cli.application.entity.job.JobResult; +import org.apache.linkis.cli.application.utils.LoggerManager; + +import java.util.HashMap; +import java.util.Map; + +public class VersionJob implements Job { + private CliCtx ctx; + + @Override + public void build(CliCtx cliCtx) { + this.ctx = cliCtx; + } + + @Override + public JobResult run() { + String version = (String) ctx.getExtraMap().get(CliKeys.VERSION); + Map extraMap = new HashMap<>(); + extraMap.put(CliKeys.VERSION, version); + LoggerManager.getPlaintTextLogger().info("Version=" + version); + return new VersionJobResult(true, "ok", extraMap); + } + + @Override + public void onDestroy() {} +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/operator/JobOperatorBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/version/VersionJobResult.java similarity index 55% rename from 
linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/operator/JobOperatorBuilder.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/version/VersionJobResult.java index 8727ec495e..e2f12cd7c2 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/operator/JobOperatorBuilder.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/version/VersionJobResult.java @@ -15,20 +15,35 @@ * limitations under the License. */ -package org.apache.linkis.cli.core.operator; +package org.apache.linkis.cli.application.interactor.job.version; -import org.apache.linkis.cli.common.entity.operator.JobOperator; -import org.apache.linkis.cli.common.entity.var.VarAccess; -import org.apache.linkis.cli.core.builder.BuildableByVarAccess; +import org.apache.linkis.cli.application.entity.job.JobResult; + +import java.util.Map; + +public class VersionJobResult implements JobResult { + private Boolean success; + private String message; + private Map extraMsg; + + public VersionJobResult(Boolean success, String message, Map extraMsg) { + this.success = success; + this.message = message; + this.extraMsg = extraMsg; + } + + @Override + public Boolean isSuccess() { + return success; + } -public abstract class JobOperatorBuilder extends BuildableByVarAccess { @Override - public JobOperatorBuilder setStdVarAccess(VarAccess varAccess) { - return (JobOperatorBuilder) super.setStdVarAccess(varAccess); + public String getMessage() { + return message; } @Override - public JobOperatorBuilder setSysVarAccess(VarAccess varAccess) { - return (JobOperatorBuilder) super.setSysVarAccess(varAccess); + public Map getExtraMessage() { + return extraMsg; } } diff --git 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/properties/ClientProperties.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/ClientProperties.java similarity index 95% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/properties/ClientProperties.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/ClientProperties.java index 00c36a3032..6d165eeed6 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/properties/ClientProperties.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/ClientProperties.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.properties; +package org.apache.linkis.cli.application.interactor.properties; import java.util.HashMap; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/StdPropsLoader.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/PropertiesLoader.java similarity index 82% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/StdPropsLoader.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/PropertiesLoader.java index f6917206ab..d66cfe605d 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/StdPropsLoader.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/PropertiesLoader.java @@ -15,24 +15,22 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.properties; +package org.apache.linkis.cli.application.interactor.properties; -import org.apache.linkis.cli.common.entity.properties.ClientProperties; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.PropsException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.properties.reader.PropertiesReader; +import org.apache.linkis.cli.application.exception.PropsException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.properties.reader.PropertiesReader; import java.util.*; -public class StdPropsLoader implements PropertiesLoader { +public class PropertiesLoader { Map readersMap; - public StdPropsLoader() { + public PropertiesLoader() { this.readersMap = new HashMap<>(); } - @Override public PropertiesLoader setPropertiesReaders(PropertiesReader[] readers) { this.readersMap = new HashMap<>(); for (PropertiesReader reader : readers) { @@ -41,7 +39,6 @@ public PropertiesLoader setPropertiesReaders(PropertiesReader[] readers) { return this; } - @Override public PropertiesLoader addPropertiesReader(PropertiesReader reader) { if (reader != null) { readersMap.put(reader.getPropsId(), reader); @@ -49,7 +46,6 @@ public PropertiesLoader addPropertiesReader(PropertiesReader reader) { return this; } - @Override public PropertiesLoader addPropertiesReaders(PropertiesReader[] readers) { if (readers != null && readers.length > 0) { for (PropertiesReader reader : readers) { @@ -59,12 +55,10 @@ public PropertiesLoader addPropertiesReaders(PropertiesReader[] readers) { return this; } - @Override public void removePropertiesReader(String identifier) { readersMap.remove(identifier); } - @Override public ClientProperties[] loadProperties() { checkInit(); List propsList = new 
ArrayList<>(); @@ -81,7 +75,6 @@ public ClientProperties[] loadProperties() { return propsList.toArray(new ClientProperties[propsList.size()]); } - @Override public void checkInit() { if (readersMap == null || readersMap.size() == 0) { throw new PropsException( diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/PropsFilesScanner.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/PropsFilesScanner.java similarity index 81% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/PropsFilesScanner.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/PropsFilesScanner.java index 2bd120e5ca..8e343d0948 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/PropsFilesScanner.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/PropsFilesScanner.java @@ -15,14 +15,14 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.properties; +package org.apache.linkis.cli.application.interactor.properties; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.constants.CommonConstants; -import org.apache.linkis.cli.core.exception.PropsException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.properties.reader.PropertiesReader; -import org.apache.linkis.cli.core.interactor.properties.reader.PropsFileReader; +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.exception.PropsException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.properties.reader.PropertiesReader; +import org.apache.linkis.cli.application.interactor.properties.reader.PropsFileReader; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; @@ -51,7 +51,7 @@ public List getPropsFiles(String rootPath) { try { files = (List) - FileUtils.listFiles(new File(rootPath), CommonConstants.CONFIG_EXTENSION, false); + FileUtils.listFiles(new File(rootPath), CliConstants.CONFIG_EXTENSION, false); } catch (Exception e) { throw new PropsException( "PRP0005", diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/PropertiesReader.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/reader/PropertiesReader.java similarity index 93% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/PropertiesReader.java rename to 
linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/reader/PropertiesReader.java index 3c86f1b1c3..65bc2d1e06 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/PropertiesReader.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/reader/PropertiesReader.java @@ -15,7 +15,7 @@ * limitations under the License. */ -package org.apache.linkis.cli.core.interactor.properties.reader; +package org.apache.linkis.cli.application.interactor.properties.reader; import java.util.Properties; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/PropsFileReader.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/reader/PropsFileReader.java similarity index 90% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/PropsFileReader.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/reader/PropsFileReader.java index 024a83311e..7bd23da140 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/properties/reader/PropsFileReader.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/properties/reader/PropsFileReader.java @@ -15,11 +15,11 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.properties.reader; +package org.apache.linkis.cli.application.interactor.properties.reader; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.PropsException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.PropsException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; import org.apache.commons.lang3.StringUtils; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/validate/LinkisSubmitValidator.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/InteractiveDescValidator.java similarity index 76% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/validate/LinkisSubmitValidator.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/InteractiveDescValidator.java index fa0f31dc1e..03eef75612 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/validate/LinkisSubmitValidator.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/InteractiveDescValidator.java @@ -18,67 +18,56 @@ package org.apache.linkis.cli.application.interactor.validate; import org.apache.linkis.cli.application.constants.LinkisKeys; -import org.apache.linkis.cli.application.interactor.job.LinkisSubmitJob; -import org.apache.linkis.cli.application.interactor.job.desc.LinkisSubmitDesc; -import 
org.apache.linkis.cli.common.entity.validate.Validator; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.ValidateException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.ValidateException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.job.interactive.InteractiveJobDesc; import org.apache.commons.lang3.StringUtils; import java.util.Map; -public class LinkisSubmitValidator implements Validator { - @Override - public void doValidation(Object input) throws LinkisClientRuntimeException { - if (!(input instanceof LinkisSubmitJob)) { - throw new ValidateException( - "VLD0007", - ErrorLevel.ERROR, - CommonErrMsg.ValidationErr, - "Input of LinkisSubmitValidator is not instance of LinkisSubmitJob. 
Type: " - + input.getClass().getCanonicalName()); - } +public class InteractiveDescValidator { + + public void doValidation(InteractiveJobDesc desc) throws LinkisClientRuntimeException { boolean ok = true; StringBuilder reasonSb = new StringBuilder(); - LinkisSubmitDesc submitDesc = ((LinkisSubmitJob) input).getJobDesc(); - if (StringUtils.isBlank(submitDesc.getSubmitUser())) { + if (StringUtils.isBlank(desc.getSubmitUser())) { reasonSb.append("Submit User cannot be empty or blank").append(System.lineSeparator()); ok = false; } - if (StringUtils.isBlank(submitDesc.getProxyUser())) { + if (StringUtils.isBlank(desc.getProxyUser())) { reasonSb .append("proxy(execute) User cannot be empty or blank") .append(System.lineSeparator()); ok = false; } - if (submitDesc.getLabelMap() == null) { + if (desc.getLabelMap() == null) { reasonSb.append("labelMap cannot be null").append(System.lineSeparator()); ok = false; } - if (submitDesc.getExecutionMap() == null) { + if (desc.getExecutionMap() == null) { reasonSb.append("ExecutionMap cannot be null").append(System.lineSeparator()); ok = false; } - if (submitDesc.getSourceMap() == null) { + if (desc.getSourceMap() == null) { reasonSb.append("SourceMap cannot be null").append(System.lineSeparator()); ok = false; } - if (submitDesc.getParamConfMap() == null) { + if (desc.getParamConfMap() == null) { reasonSb.append("startupMap cannot be null").append(System.lineSeparator()); ok = false; } - if (submitDesc.getParamVarsMap() == null) { + if (desc.getParamVarsMap() == null) { reasonSb.append("variableMap cannot be null").append(System.lineSeparator()); ok = false; } - if (submitDesc.getParamRunTimeMap() == null) { + if (desc.getParamRunTimeMap() == null) { reasonSb.append("runTimeMap cannot be null").append(System.lineSeparator()); ok = false; } - for (Map.Entry entry : submitDesc.getExecutionMap().entrySet()) { + for (Map.Entry entry : desc.getExecutionMap().entrySet()) { if (StringUtils.contains(entry.getKey(), " ")) { reasonSb 
.append("ExecutionMap key cannot contains space character. key: ") @@ -87,7 +76,7 @@ public void doValidation(Object input) throws LinkisClientRuntimeException { ok = false; } } - for (Map.Entry entry : submitDesc.getLabelMap().entrySet()) { + for (Map.Entry entry : desc.getLabelMap().entrySet()) { if (StringUtils.contains(entry.getKey(), " ")) { reasonSb .append("LabelMap key cannot contains space character. key: ") @@ -108,7 +97,7 @@ public void doValidation(Object input) throws LinkisClientRuntimeException { } } } - for (Map.Entry entry : submitDesc.getParamConfMap().entrySet()) { + for (Map.Entry entry : desc.getParamConfMap().entrySet()) { if (StringUtils.contains(entry.getKey(), " ")) { reasonSb .append("startUpMap key cannot contains space character. key: ") @@ -145,7 +134,7 @@ public void doValidation(Object input) throws LinkisClientRuntimeException { // } // } // } - for (Map.Entry entry : submitDesc.getParamVarsMap().entrySet()) { + for (Map.Entry entry : desc.getParamVarsMap().entrySet()) { if (StringUtils.contains(entry.getKey(), " ")) { reasonSb .append("variablesMap key cannot contains space character. key: ") @@ -164,7 +153,7 @@ public void doValidation(Object input) throws LinkisClientRuntimeException { // } // } } - for (Map.Entry entry : submitDesc.getSourceMap().entrySet()) { + for (Map.Entry entry : desc.getSourceMap().entrySet()) { if (StringUtils.contains(entry.getKey(), " ")) { reasonSb .append("sourceMap key cannot contains space character. 
key: ") @@ -185,30 +174,30 @@ public void doValidation(Object input) throws LinkisClientRuntimeException { } } } - if (StringUtils.isBlank((String) submitDesc.getLabelMap().get(LinkisKeys.KEY_ENGINETYPE))) { + if (StringUtils.isBlank((String) desc.getLabelMap().get(LinkisKeys.KEY_ENGINETYPE))) { reasonSb .append(LinkisKeys.KEY_ENGINETYPE) .append(" cannot be empty or blank") .append(System.lineSeparator()); ok = false; } - if (StringUtils.isBlank((String) submitDesc.getLabelMap().get(LinkisKeys.KEY_CODETYPE))) { + if (StringUtils.isBlank((String) desc.getLabelMap().get(LinkisKeys.KEY_CODETYPE))) { reasonSb .append(LinkisKeys.KEY_CODETYPE) .append(" cannot be empty or blank") .append(System.lineSeparator()); ok = false; } - if (StringUtils.isBlank((String) submitDesc.getSourceMap().get(LinkisKeys.KEY_SCRIPT_PATH))) { + if (StringUtils.isBlank((String) desc.getSourceMap().get(LinkisKeys.KEY_SCRIPT_PATH))) { reasonSb .append(LinkisKeys.KEY_SCRIPT_PATH) .append(" cannot be empty or blank") .append(System.lineSeparator()); ok = false; } - if (StringUtils.isBlank((String) submitDesc.getExecutionMap().get(LinkisKeys.KEY_CODE)) + if (StringUtils.isBlank((String) desc.getExecutionMap().get(LinkisKeys.KEY_CODE)) && StringUtils.indexOfIgnoreCase( - (String) submitDesc.getLabelMap().get(LinkisKeys.KEY_ENGINETYPE), "sqoop") + (String) desc.getLabelMap().get(LinkisKeys.KEY_ENGINETYPE), "sqoop") == -1) { reasonSb .append(LinkisKeys.KEY_CODE) @@ -216,15 +205,15 @@ public void doValidation(Object input) throws LinkisClientRuntimeException { .append(System.lineSeparator()); ok = false; } - if (StringUtils.isBlank((String) submitDesc.getLabelMap().get(LinkisKeys.KEY_USER_CREATOR))) { + if (StringUtils.isBlank((String) desc.getLabelMap().get(LinkisKeys.KEY_USER_CREATOR))) { reasonSb .append(LinkisKeys.KEY_USER_CREATOR) .append(" cannot be empty or blank") .append(System.lineSeparator()); ok = false; } else { - String userCreator = (String) 
submitDesc.getLabelMap().get(LinkisKeys.KEY_USER_CREATOR); - if (StringUtils.indexOf(submitDesc.getProxyUser(), "-") != -1) { + String userCreator = (String) desc.getLabelMap().get(LinkisKeys.KEY_USER_CREATOR); + if (StringUtils.indexOf(desc.getProxyUser(), "-") != -1) { reasonSb .append("\'proxyUser\' should not contain special character \'-\'") .append(System.lineSeparator()); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/validate/LinkisManageValidator.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/JobCmdDescValidator.java similarity index 60% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/validate/LinkisManageValidator.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/JobCmdDescValidator.java index 519a2ee85c..95b95d5833 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/validate/LinkisManageValidator.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/JobCmdDescValidator.java @@ -17,30 +17,18 @@ package org.apache.linkis.cli.application.interactor.validate; -import org.apache.linkis.cli.application.interactor.job.LinkisManageJob; -import org.apache.linkis.cli.application.interactor.job.desc.LinkisJobManDesc; -import org.apache.linkis.cli.common.entity.validate.Validator; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.ValidateException; -import 
org.apache.linkis.cli.core.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.ValidateException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.job.jobcmd.JobCmdDesc; import org.apache.commons.lang3.StringUtils; -public class LinkisManageValidator implements Validator { - @Override - public void doValidation(Object input) throws LinkisClientRuntimeException { - if (!(input instanceof LinkisManageJob)) { - throw new ValidateException( - "VLD0007", - ErrorLevel.ERROR, - CommonErrMsg.ValidationErr, - "Input of LinkisSubmitValidator is not instance of LinkisManageJob. Type: " - + input.getClass().getCanonicalName()); - } +public class JobCmdDescValidator { + public void doValidation(JobCmdDesc desc) throws LinkisClientRuntimeException { boolean ok = true; StringBuilder reasonSb = new StringBuilder(); - LinkisJobManDesc desc = ((LinkisManageJob) input).getJobDesc(); if (StringUtils.isBlank(desc.getJobID())) { reasonSb.append("jobId cannot be empty or blank").append(System.lineSeparator()); ok = false; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/validate/LinkisOnceSubmitValidator.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/OnceDescValidator.java similarity index 77% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/validate/LinkisOnceSubmitValidator.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/OnceDescValidator.java index 
fba6644700..f62cafe054 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/validate/LinkisOnceSubmitValidator.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/OnceDescValidator.java @@ -18,71 +18,59 @@ package org.apache.linkis.cli.application.interactor.validate; import org.apache.linkis.cli.application.constants.LinkisKeys; -import org.apache.linkis.cli.application.interactor.job.LinkisOnceJob; -import org.apache.linkis.cli.application.interactor.job.desc.LinkisOnceDesc; -import org.apache.linkis.cli.common.entity.validate.Validator; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.ValidateException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.ValidateException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.operator.once.OnceJobDesc; import org.apache.commons.lang3.StringUtils; import java.util.Map; -public class LinkisOnceSubmitValidator implements Validator { - @Override - public void doValidation(Object input) throws LinkisClientRuntimeException { - if (!(input instanceof LinkisOnceJob)) { - throw new ValidateException( - "VLD0007", - ErrorLevel.ERROR, - CommonErrMsg.ValidationErr, - "Input of LinkisSubmitValidator is not instance of LinkisSubmitJob. 
Type: " - + input.getClass().getCanonicalName()); - } +public class OnceDescValidator { + public void doValidation(OnceJobDesc desc) throws LinkisClientRuntimeException { boolean ok = true; StringBuilder reasonSb = new StringBuilder(); - LinkisOnceDesc submitDesc = (LinkisOnceDesc) ((LinkisOnceJob) input).getJobDesc(); - if (StringUtils.isBlank(submitDesc.getSubmitUser())) { + if (StringUtils.isBlank(desc.getSubmitUser())) { reasonSb.append("Submit User cannot be empty or blank").append(System.lineSeparator()); ok = false; } - if (StringUtils.isBlank(submitDesc.getProxyUser())) { + if (StringUtils.isBlank(desc.getProxyUser())) { reasonSb .append("proxy(execute) User cannot be empty or blank") .append(System.lineSeparator()); ok = false; } - if (submitDesc.getLabelMap() == null) { + if (desc.getLabelMap() == null) { reasonSb.append("labelMap cannot be null").append(System.lineSeparator()); ok = false; } - if (submitDesc.getExecutionMap() == null) { + if (desc.getExecutionMap() == null) { reasonSb.append("ExecutionMap cannot be null").append(System.lineSeparator()); ok = false; } - if (submitDesc.getSourceMap() == null) { + if (desc.getSourceMap() == null) { reasonSb.append("SourceMap cannot be null").append(System.lineSeparator()); ok = false; } - if (submitDesc.getParamConfMap() == null) { + if (desc.getParamConfMap() == null) { reasonSb.append("startupMap cannot be null").append(System.lineSeparator()); ok = false; } - if (submitDesc.getParamVarsMap() == null) { + if (desc.getParamVarsMap() == null) { reasonSb.append("variableMap cannot be null").append(System.lineSeparator()); ok = false; } - if (submitDesc.getParamRunTimeMap() == null) { + if (desc.getParamRunTimeMap() == null) { reasonSb.append("runTimeMap cannot be null").append(System.lineSeparator()); ok = false; } - if (submitDesc.getJobContentMap() == null) { + if (desc.getJobContentMap() == null) { reasonSb.append("jobContentMap cannot be null").append(System.lineSeparator()); ok = false; } - for 
(Map.Entry entry : submitDesc.getExecutionMap().entrySet()) { + for (Map.Entry entry : desc.getExecutionMap().entrySet()) { if (StringUtils.contains(entry.getKey(), " ")) { reasonSb .append("ExecutionMap key cannot contains space character. key: ") @@ -91,7 +79,7 @@ public void doValidation(Object input) throws LinkisClientRuntimeException { ok = false; } } - for (Map.Entry entry : submitDesc.getLabelMap().entrySet()) { + for (Map.Entry entry : desc.getLabelMap().entrySet()) { if (StringUtils.contains(entry.getKey(), " ")) { reasonSb .append("LabelMap key cannot contains space character. key: ") @@ -112,7 +100,7 @@ public void doValidation(Object input) throws LinkisClientRuntimeException { } } } - for (Map.Entry entry : submitDesc.getParamConfMap().entrySet()) { + for (Map.Entry entry : desc.getParamConfMap().entrySet()) { if (StringUtils.contains(entry.getKey(), " ")) { reasonSb .append("startUpMap key cannot contains space character. key: ") @@ -149,7 +137,7 @@ public void doValidation(Object input) throws LinkisClientRuntimeException { // } // } // } - for (Map.Entry entry : submitDesc.getParamVarsMap().entrySet()) { + for (Map.Entry entry : desc.getParamVarsMap().entrySet()) { if (StringUtils.contains(entry.getKey(), " ")) { reasonSb .append("variablesMap key cannot contains space character. key: ") @@ -168,7 +156,7 @@ public void doValidation(Object input) throws LinkisClientRuntimeException { // } // } } - for (Map.Entry entry : submitDesc.getSourceMap().entrySet()) { + for (Map.Entry entry : desc.getSourceMap().entrySet()) { if (StringUtils.contains(entry.getKey(), " ")) { reasonSb .append("sourceMap key cannot contains space character. 
key: ") @@ -189,7 +177,7 @@ public void doValidation(Object input) throws LinkisClientRuntimeException { } } } - for (Map.Entry entry : submitDesc.getJobContentMap().entrySet()) { + for (Map.Entry entry : desc.getJobContentMap().entrySet()) { if (StringUtils.contains(entry.getKey(), " ")) { reasonSb .append("jobContentMap key cannot contains space character. key: ") @@ -208,30 +196,30 @@ public void doValidation(Object input) throws LinkisClientRuntimeException { // } // } } - if (StringUtils.isBlank((String) submitDesc.getLabelMap().get(LinkisKeys.KEY_ENGINETYPE))) { + if (StringUtils.isBlank((String) desc.getLabelMap().get(LinkisKeys.KEY_ENGINETYPE))) { reasonSb .append(LinkisKeys.KEY_ENGINETYPE) .append(" cannot be empty or blank") .append(System.lineSeparator()); ok = false; } - if (StringUtils.isBlank((String) submitDesc.getLabelMap().get(LinkisKeys.KEY_CODETYPE))) { + if (StringUtils.isBlank((String) desc.getLabelMap().get(LinkisKeys.KEY_CODETYPE))) { reasonSb .append(LinkisKeys.KEY_CODETYPE) .append(" cannot be empty or blank") .append(System.lineSeparator()); ok = false; } - if (StringUtils.isBlank((String) submitDesc.getSourceMap().get(LinkisKeys.KEY_SCRIPT_PATH))) { + if (StringUtils.isBlank((String) desc.getSourceMap().get(LinkisKeys.KEY_SCRIPT_PATH))) { reasonSb .append(LinkisKeys.KEY_SCRIPT_PATH) .append(" cannot be empty or blank") .append(System.lineSeparator()); ok = false; } - if (StringUtils.isBlank((String) submitDesc.getExecutionMap().get(LinkisKeys.KEY_CODE)) + if (StringUtils.isBlank((String) desc.getExecutionMap().get(LinkisKeys.KEY_CODE)) && StringUtils.indexOfIgnoreCase( - (String) submitDesc.getLabelMap().get(LinkisKeys.KEY_ENGINETYPE), "sqoop") + (String) desc.getLabelMap().get(LinkisKeys.KEY_ENGINETYPE), "sqoop") == -1) { reasonSb .append(LinkisKeys.KEY_CODE) @@ -239,15 +227,15 @@ public void doValidation(Object input) throws LinkisClientRuntimeException { .append(System.lineSeparator()); ok = false; } - if (StringUtils.isBlank((String) 
submitDesc.getLabelMap().get(LinkisKeys.KEY_USER_CREATOR))) { + if (StringUtils.isBlank((String) desc.getLabelMap().get(LinkisKeys.KEY_USER_CREATOR))) { reasonSb .append(LinkisKeys.KEY_USER_CREATOR) .append(" cannot be empty or blank") .append(System.lineSeparator()); ok = false; } else { - String userCreator = (String) submitDesc.getLabelMap().get(LinkisKeys.KEY_USER_CREATOR); - if (StringUtils.indexOf(submitDesc.getProxyUser(), "-") != -1) { + String userCreator = (String) desc.getLabelMap().get(LinkisKeys.KEY_USER_CREATOR); + if (StringUtils.indexOf(desc.getProxyUser(), "-") != -1) { reasonSb .append("\'proxyUser\' should not contain special character \'-\'") .append(System.lineSeparator()); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/validate/ParamValidator.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/ParamValidator.java similarity index 75% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/validate/ParamValidator.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/ParamValidator.java index abbc695976..b54dd6b723 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/validate/ParamValidator.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/ParamValidator.java @@ -15,14 +15,12 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.validate; +package org.apache.linkis.cli.application.interactor.validate; -import org.apache.linkis.cli.common.entity.command.Params; -import org.apache.linkis.cli.common.entity.validate.Validator; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.entity.command.Params; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; -public class ParamValidator implements Validator { - @Override +public class ParamValidator { public void doValidation(Object input) throws LinkisClientRuntimeException { if (!(input instanceof Params)) { // TODO:throw diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/validate/ParsedTplValidator.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/ParsedTplValidator.java similarity index 69% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/validate/ParsedTplValidator.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/ParsedTplValidator.java index e7f14e2df1..a7ea05c72b 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/validate/ParsedTplValidator.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/ParsedTplValidator.java @@ -15,15 +15,14 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.interactor.validate; +package org.apache.linkis.cli.application.interactor.validate; -import org.apache.linkis.cli.common.entity.command.CmdOption; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.validate.Validator; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.exception.ValidateException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.entity.command.CmdOption; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.exception.ValidateException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; import java.text.MessageFormat; import java.util.List; @@ -35,20 +34,10 @@ * 1. Check if there is missing or unknown option. 2. Call checkParam method for command-specific * validation. 
*/ -public class ParsedTplValidator implements Validator { +public class ParsedTplValidator { private static final Logger logger = LoggerFactory.getLogger(ParsedTplValidator.class); - @Override - public void doValidation(Object input) throws CommandException { - if (!(input instanceof CmdTemplate)) { - throw new ValidateException( - "VLD0006", - ErrorLevel.ERROR, - CommonErrMsg.ValidationErr, - "Input of ParsedTplValidator is not instance of CmdTemplate"); - } - - CmdTemplate parsedTemplateCopy = (CmdTemplate) input; + public void doValidation(CmdTemplate parsedTemplateCopy) throws CommandException { String msg = "start validating command \"{0}\", template \"{1}\""; logger.info( diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/validate/UJESContextValidator.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/UJESContextValidator.java similarity index 75% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/validate/UJESContextValidator.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/UJESContextValidator.java index 8c0653cb4f..9e5a9a6457 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/validate/UJESContextValidator.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/validate/UJESContextValidator.java @@ -18,28 +18,18 @@ package org.apache.linkis.cli.application.interactor.validate; import org.apache.linkis.cli.application.constants.LinkisConstants; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import 
org.apache.linkis.cli.application.exception.ValidateException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; import org.apache.linkis.cli.application.operator.ujes.UJESClientContext; -import org.apache.linkis.cli.common.entity.validate.Validator; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.ValidateException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; import org.apache.commons.lang3.StringUtils; -public class UJESContextValidator implements Validator { - @Override - public void doValidation(Object input) throws LinkisClientRuntimeException { - if (!(input instanceof UJESClientContext)) { - throw new ValidateException( - "VLD0009", - ErrorLevel.ERROR, - CommonErrMsg.ValidationErr, - "Input of UJESContextValidator is not instance of UjesClientDriverContext"); - } +public class UJESContextValidator { + public void doValidation(UJESClientContext context) throws LinkisClientRuntimeException { boolean ok = true; StringBuilder reasonSb = new StringBuilder(); - UJESClientContext context = (UJESClientContext) input; if (StringUtils.isBlank(context.getGatewayUrl())) { reasonSb.append("gatewayUrl cannot be empty or blank").append(System.lineSeparator()); ok = false; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/var/StdVarAccess.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/var/VarAccessImpl.java similarity index 88% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/var/StdVarAccess.java rename to 
linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/var/VarAccessImpl.java index 267dae2325..9a871828a3 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/var/StdVarAccess.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/var/VarAccessImpl.java @@ -15,18 +15,18 @@ * limitations under the License. */ -package org.apache.linkis.cli.core.interactor.var; - -import org.apache.linkis.cli.common.entity.command.ParamItem; -import org.apache.linkis.cli.common.entity.command.Params; -import org.apache.linkis.cli.common.entity.properties.ClientProperties; -import org.apache.linkis.cli.common.entity.var.VarAccess; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.VarAccessException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.command.SpecialMap; -import org.apache.linkis.cli.core.utils.converter.AbstractStringConverter; -import org.apache.linkis.cli.core.utils.converter.PredefinedStringConverters; +package org.apache.linkis.cli.application.interactor.var; + +import org.apache.linkis.cli.application.entity.command.ParamItem; +import org.apache.linkis.cli.application.entity.command.Params; +import org.apache.linkis.cli.application.entity.var.VarAccess; +import org.apache.linkis.cli.application.exception.VarAccessException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.command.SpecialMap; +import org.apache.linkis.cli.application.interactor.command.template.converter.AbstractStringConverter; +import 
org.apache.linkis.cli.application.interactor.command.template.converter.PredefinedStringConverters; +import org.apache.linkis.cli.application.interactor.properties.ClientProperties; import org.apache.commons.lang3.StringUtils; @@ -35,14 +35,14 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class StdVarAccess implements VarAccess { - private static Logger logger = LoggerFactory.getLogger(StdVarAccess.class); +public class VarAccessImpl implements VarAccess { + private static Logger logger = LoggerFactory.getLogger(VarAccessImpl.class); private Params cmdParams; private ClientProperties userConf; private ClientProperties defaultConf; private Map subMapCache; - public StdVarAccess setCmdParams(Params cmdParams) { + public VarAccessImpl setCmdParams(Params cmdParams) { this.cmdParams = cmdParams; return this; } @@ -51,7 +51,7 @@ public Params getSubParam(String identifier) { return this.cmdParams; } - public StdVarAccess setUserConf(ClientProperties userConf) { + public VarAccessImpl setUserConf(ClientProperties userConf) { this.userConf = userConf; return this; } @@ -60,7 +60,7 @@ public ClientProperties getUserConf(String identifier) { return this.userConf; } - public StdVarAccess setDefaultConf(ClientProperties defaultConf) { + public VarAccessImpl setDefaultConf(ClientProperties defaultConf) { this.defaultConf = defaultConf; return this; } @@ -69,7 +69,7 @@ public ClientProperties getDefaultConf(String identifier) { return this.defaultConf; } - public StdVarAccess init() { + public VarAccessImpl init() { this.subMapCache = new HashMap<>(); putSubMapCache(subMapCache, cmdParams); return this; @@ -104,7 +104,6 @@ private void putSubMapCache(Map subMapCache, Params param) { } } - @Override public void checkInit() { if (this.cmdParams == null || this.defaultConf == null || this.subMapCache == null) { throw new VarAccessException( diff --git 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/event/TriggerEvent.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/observer/event/FetchResultEvent.java similarity index 93% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/event/TriggerEvent.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/observer/event/FetchResultEvent.java index dada521b59..d538a40dff 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/event/TriggerEvent.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/observer/event/FetchResultEvent.java @@ -17,4 +17,4 @@ package org.apache.linkis.cli.application.observer.event; -public class TriggerEvent extends SingleObserverEvent {} +public class FetchResultEvent extends SingleObserverEvent {} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/event/LinkisClientEvent.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/observer/event/LinkisClientEvent.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/event/LinkisClientEvent.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/observer/event/LinkisClientEvent.java diff --git 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/event/LogStartEvent.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/observer/event/LogStartEvent.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/event/LogStartEvent.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/observer/event/LogStartEvent.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/event/SingleObserverEvent.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/observer/event/SingleObserverEvent.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/event/SingleObserverEvent.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/observer/event/SingleObserverEvent.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/listener/LinkisClientListener.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/observer/listener/LinkisClientListener.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/listener/LinkisClientListener.java rename to 
linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/observer/listener/LinkisClientListener.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/JobOperBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/JobOperBuilder.java new file mode 100644 index 0000000000..0e1604b204 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/JobOperBuilder.java @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.operator; + +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.operator.JobOper; + +public interface JobOperBuilder { + JobOper build(CliCtx ctx); +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/OperManager.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/OperManager.java new file mode 100644 index 0000000000..a41bfd5f9f --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/OperManager.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.operator; + +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.operator.JobOper; + +import java.util.HashMap; +import java.util.Map; + +public class OperManager { + + private static Map builderMap = new HashMap<>(); + + public static void register(String name, JobOperBuilder builder) { + builderMap.put(name, builder); + } + + public static void remove(String name) { + builderMap.remove(name); + } + + public static JobOper getNew(String name, CliCtx ctx) { + JobOperBuilder builder = builderMap.get(name); + if (builder == null) { + return null; + } + return builder.build(ctx); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/once/LinkisNodeStatus.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/LinkisNodeStatus.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/once/LinkisNodeStatus.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/LinkisNodeStatus.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/once/OnceJobConstants.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/OnceJobConstants.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/once/OnceJobConstants.java rename to 
linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/OnceJobConstants.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/desc/LinkisOnceDesc.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/OnceJobDesc.java similarity index 82% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/desc/LinkisOnceDesc.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/OnceJobDesc.java index a98d99383e..2bb63c0e0e 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/desc/LinkisOnceDesc.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/OnceJobDesc.java @@ -15,18 +15,14 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.application.interactor.job.desc; +package org.apache.linkis.cli.application.operator.once; -import org.apache.linkis.cli.common.entity.job.JobDescription; -import org.apache.linkis.cli.common.entity.var.VarAccess; +import org.apache.linkis.cli.application.entity.var.VarAccess; import java.util.Map; -public class LinkisOnceDesc implements JobDescription { - - private VarAccess stdVarAccess; - private VarAccess sysVarAccess; - +public class OnceJobDesc { + private VarAccess varAccess; private String submitUser; private String proxyUser; private String creator; @@ -38,20 +34,12 @@ public class LinkisOnceDesc implements JobDescription { private Map sourceMap; private Map jobContentMap; - public VarAccess getStdVarAccess() { - return stdVarAccess; - } - - public void setStdVarAccess(VarAccess stdVarAccess) { - this.stdVarAccess = stdVarAccess; - } - - public VarAccess getSysVarAccess() { - return sysVarAccess; + public VarAccess getVarAccess() { + return varAccess; } - public void setSysVarAccess(VarAccess sysVarAccess) { - this.sysVarAccess = sysVarAccess; + public void setVarAccess(VarAccess varAccess) { + this.varAccess = varAccess; } public String getSubmitUser() { diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/OnceJobOper.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/OnceJobOper.java new file mode 100644 index 0000000000..afe2f66996 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/OnceJobOper.java @@ -0,0 +1,170 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.operator.once; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.constants.LinkisKeys; +import org.apache.linkis.cli.application.entity.job.JobStatus; +import org.apache.linkis.cli.application.entity.operator.JobOper; +import org.apache.linkis.cli.application.entity.var.VarAccess; +import org.apache.linkis.cli.application.exception.LinkisClientExecutionException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.job.common.LinkisJobStatus; +import org.apache.linkis.cli.application.operator.ujes.UJESClientFactory; +import org.apache.linkis.computation.client.LinkisJobBuilder$; +import org.apache.linkis.computation.client.once.simple.SimpleOnceJob; +import org.apache.linkis.computation.client.once.simple.SimpleOnceJobBuilder; +import org.apache.linkis.computation.client.once.simple.SubmittableSimpleOnceJob; +import org.apache.linkis.computation.client.operator.impl.EngineConnLogOperator; +import org.apache.linkis.computation.client.operator.impl.EngineConnLogs; + +import org.apache.commons.lang3.StringUtils; + +public class OnceJobOper implements JobOper { + + EngineConnLogOperator 
logOperator = null; + private SimpleOnceJob onceJob; + private String serverUrl; + private String engineTypeForECM; + private Boolean isLogFin = false; + + public void init(OnceJobDesc desc) { + + VarAccess varAccess = desc.getVarAccess(); + + serverUrl = varAccess.getVar(String.class, CliKeys.LINKIS_COMMON_GATEWAY_URL); + + LinkisJobBuilder$.MODULE$.setDefaultClientConfig( + UJESClientFactory.generateDWSClientConfig(varAccess)); + LinkisJobBuilder$.MODULE$.setDefaultUJESClient(UJESClientFactory.getReusable(varAccess)); + + String engineTypeRaw = (String) desc.getLabelMap().get(LinkisKeys.KEY_ENGINETYPE); + engineTypeForECM = engineTypeRaw; + + if (StringUtils.isNotBlank(engineTypeRaw)) { + engineTypeForECM = StringUtils.split(engineTypeRaw, "-")[0]; + } else { + engineTypeForECM = ""; + } // TODO: remove parsing and let server side parse engineType + + onceJob = + new SimpleOnceJobBuilder() + .setCreateService(CliConstants.LINKIS_CLI) + .addExecuteUser(desc.getProxyUser()) + .setStartupParams(desc.getParamConfMap()) + .setLabels(desc.getLabelMap()) + .setRuntimeParams(desc.getParamRunTimeMap()) + .setSource(desc.getSourceMap()) + .setVariableMap(desc.getParamVarsMap()) + .setJobContent(desc.getJobContentMap()) + .build(); + } + + public String getServerUrl() { + return serverUrl; + } + + public SimpleOnceJob getOnceJob() { + return onceJob; + } + + public void setOnceJob(SimpleOnceJob onceJob) { + this.onceJob = onceJob; + } + + private void panicIfNull(Object obj) { + if (obj == null) { + throw new LinkisClientExecutionException( + "EXE0040", + ErrorLevel.ERROR, + CommonErrMsg.ExecutionErr, + "Instance of " + obj.getClass().getCanonicalName() + " is null"); + } + } + + public void submit() { + panicIfNull(onceJob); + if (!(onceJob instanceof SubmittableSimpleOnceJob)) { + throw new LinkisClientExecutionException( + "EXE0041", + ErrorLevel.ERROR, + CommonErrMsg.ExecutionErr, + "onceJob is not properly initiated"); + } + ((SubmittableSimpleOnceJob) 
onceJob).submit(); + } + + public void kill() { + panicIfNull(onceJob); + if (!getStatus().isJobFinishedState()) { + onceJob.kill(); + } + } + + public String getJobID() { + return onceJob.getId(); + } + + public String getUser() { + return "TODO"; + } + + public JobStatus getStatus() { + panicIfNull(onceJob); + String status = onceJob.getStatus(); + return LinkisJobStatus.convertFromNodeStatusString(status); + } + + public void waitForComplete() { + panicIfNull(onceJob); + onceJob.waitForCompleted(); + } + + public String getCurrentLog() { + panicIfNull(onceJob); + if (logOperator == null) { + logOperator = + (EngineConnLogOperator) onceJob.getOperator(EngineConnLogOperator.OPERATOR_NAME()); + logOperator.setECMServiceInstance( + ((SubmittableSimpleOnceJob) onceJob).getECMServiceInstance()); + logOperator.setEngineConnType(engineTypeForECM); + // logOperator.setPageSize(OnceJobConstants.MAX_LOG_SIZE_ONCE); + logOperator.setIgnoreKeywords(OnceJobConstants.LOG_IGNORE_KEYWORDS); + } + EngineConnLogs logs = + (EngineConnLogs) logOperator.apply(); // for some reason we have to add type conversion, + // otherwise mvn testCompile fails + StringBuilder logBuilder = new StringBuilder(); + for (String log : logs.logs()) { + logBuilder.append(log).append(System.lineSeparator()); + } + String status = onceJob.getStatus(); + LinkisJobStatus jobStatus = LinkisJobStatus.convertFromNodeStatusString(status); + if ((logs.logs() == null || logs.logs().size() <= 0) && jobStatus.isJobFinishedState()) { + isLogFin = true; + } + return logBuilder.toString(); + // System.out.println(logs.logs().size()); + } + + public Boolean isLogFin() { + return isLogFin; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/OnceOperBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/OnceOperBuilder.java new file mode 100644 index 
0000000000..906fc03406 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/once/OnceOperBuilder.java @@ -0,0 +1,170 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.cli.application.operator.once; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.constants.LinkisKeys; +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.operator.JobOper; +import org.apache.linkis.cli.application.interactor.job.common.KeyParser; +import org.apache.linkis.cli.application.operator.JobOperBuilder; +import org.apache.linkis.cli.application.utils.CliUtils; + +import org.apache.commons.lang3.StringUtils; + +import java.util.*; + +public class OnceOperBuilder implements JobOperBuilder { + @Override + public JobOper build(CliCtx ctx) { + + OnceJobDesc desc = new OnceJobDesc(); + + Map confMap = ctx.getVarAccess().getVar(Map.class, CliKeys.JOB_PARAM_CONF); + Map runtimeMap = + ctx.getVarAccess().getVar(Map.class, CliKeys.JOB_PARAM_RUNTIME); + Map varMap = ctx.getVarAccess().getVar(Map.class, CliKeys.JOB_PARAM_VAR); + Map labelMap = ctx.getVarAccess().getVar(Map.class, CliKeys.JOB_LABEL); + Map sourceMap = ctx.getVarAccess().getVar(Map.class, CliKeys.JOB_SOURCE); + Map executionMap = ctx.getVarAccess().getVar(Map.class, CliKeys.JOB_EXEC); + Map jobContentMap = ctx.getVarAccess().getVar(Map.class, CliKeys.JOB_CONTENT); + + confMap = confMap == null ? new HashMap<>() : confMap; + runtimeMap = runtimeMap == null ? new HashMap<>() : runtimeMap; + varMap = varMap == null ? new HashMap<>() : varMap; + labelMap = labelMap == null ? new HashMap<>() : labelMap; + sourceMap = sourceMap == null ? new HashMap<>() : sourceMap; + executionMap = executionMap == null ? new HashMap<>() : executionMap; + jobContentMap = jobContentMap == null ? 
new HashMap<>() : jobContentMap; + + confMap = KeyParser.removePrefixForKeysInMap(confMap); + runtimeMap = KeyParser.removePrefixForKeysInMap(runtimeMap); + labelMap = KeyParser.removePrefixForKeysInMap(labelMap); + sourceMap = KeyParser.removePrefixForKeysInMap(sourceMap); + executionMap = KeyParser.removePrefixForKeysInMap(executionMap); + jobContentMap = KeyParser.removePrefixForKeysInMap(jobContentMap); + + for (String key : ctx.getVarAccess().getAllVarKeys()) { + Object val = ctx.getVarAccess().getVar(Object.class, key); + if (!(val instanceof Map) && val != null) { + // note that we allow it to overwrite existing values in map + if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_PARAM_CONF)) { + KeyParser.removePrefixAndPutValToMap(confMap, key, val, CliKeys.JOB_PARAM_CONF); + } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_PARAM_VAR)) { + KeyParser.removePrefixAndPutValToMap(varMap, key, val, CliKeys.JOB_PARAM_VAR); + } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_PARAM_RUNTIME)) { + KeyParser.removePrefixAndPutValToMap(runtimeMap, key, val, CliKeys.JOB_PARAM_RUNTIME); + } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_EXEC)) { + KeyParser.removePrefixAndPutValToMap(executionMap, key, val, CliKeys.JOB_EXEC); + } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_LABEL)) { + KeyParser.removePrefixAndPutValToMap(labelMap, key, val, CliKeys.JOB_LABEL); + } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_SOURCE)) { + KeyParser.removePrefixAndPutValToMap(sourceMap, key, val, CliKeys.JOB_SOURCE); + } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.JOB_CONTENT)) { + KeyParser.removePrefixAndPutValToMap(jobContentMap, key, val, CliKeys.JOB_CONTENT); + } else if (StringUtils.startsWithIgnoreCase(key, CliKeys.LINKIS_CLIENT_COMMON)) { + // do nothing + } else { + // confMap.put(key, stdVarAccess.getVar(Object.class, key)); + } + } + } + + Boolean isAsync = + 
ctx.getVarAccess().getVarOrDefault(Boolean.class, CliKeys.LINKIS_CLIENT_ASYNC_OPT, false); + + String creator; + if (!isAsync) { + creator = + ctx.getVarAccess() + .getVarOrDefault( + String.class, CliKeys.JOB_COMMON_CREATOR, CliConstants.JOB_CREATOR_DEFAULT); + } else { + creator = + ctx.getVarAccess() + .getVarOrDefault( + String.class, CliKeys.JOB_COMMON_CREATOR, CliConstants.JOB_CREATOR_ASYNC_DEFAULT); + } + String code = ctx.getVarAccess().getVar(String.class, CliKeys.JOB_EXEC_CODE); + String engineType = ctx.getVarAccess().getVar(String.class, CliKeys.JOB_LABEL_ENGINE_TYPE); + String runType = ctx.getVarAccess().getVar(String.class, CliKeys.JOB_LABEL_CODE_TYPE); + String scriptPath = + ctx.getVarAccess() + .getVarOrDefault(String.class, CliKeys.JOB_SOURCE_SCRIPT_PATH, "LinkisCli"); + + String osUser = System.getProperty(CliKeys.LINUX_USER_KEY); + String[] adminUsers = StringUtils.split(CliKeys.ADMIN_USERS, ','); + Set adminSet = new HashSet<>(); + for (String admin : adminUsers) { + adminSet.add(admin); + } + String submitUsr = CliUtils.getSubmitUser(ctx.getVarAccess(), osUser, adminSet); + String proxyUsr = CliUtils.getProxyUser(ctx.getVarAccess(), submitUsr, adminSet); + + String enableExecuteOnce = + ctx.getVarAccess().getVarOrDefault(String.class, CliKeys.JOB_LABEL_EXECUTEONCE, "true"); + // default executeOnce-mode + if (Boolean.parseBoolean(enableExecuteOnce)) { + labelMap.put(LinkisKeys.KEY_EXECUTEONCE, ""); + } else { + labelMap.remove(LinkisKeys.KEY_EXECUTEONCE); + } + String codePath = ctx.getVarAccess().getVar(String.class, CliKeys.JOB_COMMON_CODE_PATH); + Object extraArgsObj = ctx.getVarAccess().getVar(Object.class, CliKeys.JOB_EXTRA_ARGUMENTS); + if (extraArgsObj != null + && extraArgsObj instanceof String[] + && StringUtils.isBlank(code) + && StringUtils.isBlank(codePath)) { + String[] extraArgs = (String[]) extraArgsObj; + codePath = extraArgs[0]; + if (extraArgs.length > 1) { + runtimeMap.put( + LinkisKeys.EXTRA_ARGUMENTS, 
Arrays.copyOfRange(extraArgs, 1, extraArgs.length)); + } + } + + if (StringUtils.isBlank(code) && StringUtils.isNotBlank(codePath)) { + code = CliUtils.readFile(codePath); + } + + executionMap.put(LinkisKeys.KEY_CODE, code); + labelMap.put(LinkisKeys.KEY_ENGINETYPE, engineType); + labelMap.put(LinkisKeys.KEY_CODETYPE, runType); + labelMap.put(LinkisKeys.KEY_USER_CREATOR, proxyUsr + "-" + creator); + sourceMap.put(LinkisKeys.KEY_SCRIPT_PATH, scriptPath); + runtimeMap.put(LinkisKeys.KEY_HIVE_RESULT_DISPLAY_TBALE, true); + + desc.setVarAccess(ctx.getVarAccess()); + desc.setCreator(creator); + desc.setParamConfMap(confMap); + desc.setParamRunTimeMap(runtimeMap); + desc.setParamVarsMap(varMap); + desc.setLabelMap(labelMap); + desc.setSourceMap(sourceMap); + desc.setExecutionMap(executionMap); + desc.setSubmitUser(submitUsr); + desc.setProxyUser(proxyUsr); + desc.setJobContentMap(jobContentMap); + + OnceJobOper onceJobOper = new OnceJobOper(); + onceJobOper.init(desc); + + return onceJobOper; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisJobOperator.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisJobOper.java similarity index 88% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisJobOperator.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisJobOper.java index 87c49cd141..191061332b 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisJobOperator.java +++ 
b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisJobOper.java @@ -18,15 +18,15 @@ package org.apache.linkis.cli.application.operator.ujes; import org.apache.linkis.cli.application.constants.LinkisKeys; -import org.apache.linkis.cli.application.interactor.job.desc.LinkisSubmitDesc; +import org.apache.linkis.cli.application.entity.operator.JobOper; +import org.apache.linkis.cli.application.exception.LinkisClientExecutionException; +import org.apache.linkis.cli.application.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.job.interactive.InteractiveJobDesc; import org.apache.linkis.cli.application.operator.ujes.result.OpenLogResult2; import org.apache.linkis.cli.application.operator.ujes.result.ResultSetResult2; -import org.apache.linkis.cli.application.utils.Utils; -import org.apache.linkis.cli.common.entity.operator.JobOperator; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.utils.CliUtils; import org.apache.linkis.common.exception.LinkisException; import org.apache.linkis.ujes.client.UJESClient; import org.apache.linkis.ujes.client.request.JobSubmitAction; @@ -43,9 +43,9 @@ import org.slf4j.LoggerFactory; /** Based on UjesClient */ -public class LinkisJobOperator implements JobOperator { +public class LinkisJobOper implements JobOper { protected UJESClient client; - private Logger logger = LoggerFactory.getLogger(LinkisJobOperator.class); + private Logger logger = LoggerFactory.getLogger(LinkisJobOper.class); private String serverUrl; 
public UJESClient getUJESClient() { @@ -87,7 +87,7 @@ public void checkInit() throws LinkisClientRuntimeException { * @param * @return */ - public LinkisOperResultAdapter submit(LinkisSubmitDesc jobDesc) + public LinkisOperResultAdapter submit(InteractiveJobDesc jobDesc) throws LinkisClientRuntimeException { checkInit(); JobSubmitResult jobSubmitResult; @@ -104,7 +104,7 @@ public LinkisOperResultAdapter submit(LinkisSubmitDesc jobDesc) .setLabels(jobDesc.getLabelMap()) .setSource(jobDesc.getSourceMap()) .build(); - logger.info("Request info to Linkis: \n{}", Utils.GSON.toJson(jobSubmitAction)); + logger.info("Request info to Linkis: \n{}", CliUtils.GSON.toJson(jobSubmitAction)); /* Old API */ // JobExecuteAction jobExecuteAction = JobExecuteAction.builder() @@ -127,7 +127,7 @@ public LinkisOperResultAdapter submit(LinkisSubmitDesc jobDesc) // jobExecuteResult = client.execute(jobExecuteAction); jobSubmitResult = client.submit(jobSubmitAction); - logger.info("Response info from Linkis: \n{}", Utils.GSON.toJson(jobSubmitAction)); + logger.info("Response info from Linkis: \n{}", CliUtils.GSON.toJson(jobSubmitAction)); } catch (Exception e) { // must throw if exception @@ -142,7 +142,8 @@ public LinkisOperResultAdapter submit(LinkisSubmitDesc jobDesc) if (jobSubmitResult == null) { reason = "JobSubmitResult is null"; } else if (0 != jobSubmitResult.getStatus()) { - reason = "server returns non-zero status-code"; + reason = "server returns non-zero status-code. 
"; + reason += jobSubmitResult.getMessage(); } else { reason = "server returns blank TaskId"; } @@ -191,13 +192,14 @@ public LinkisOperResultAdapter queryJobStatus(String user, String taskID, String while (retryTime++ < MAX_RETRY_TIME) { try { jobStatusResult = client.status(executeResult); - logger.debug("job-status: " + Utils.GSON.toJson(jobStatusResult)); + logger.debug("job-status: " + CliUtils.GSON.toJson(jobStatusResult)); if (jobStatusResult == null || 0 != jobStatusResult.getStatus()) { String reason; if (jobStatusResult == null) { reason = "jobStatusResult is null"; } else { - reason = "server returns non-zero status-code"; + reason = "server returns non-zero status-code. "; + reason += jobStatusResult.getMessage(); } String msg = MessageFormat.format( @@ -224,14 +226,15 @@ public LinkisOperResultAdapter queryJobStatus(String user, String taskID, String "EXE0013", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg, e); } } - Utils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); + CliUtils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); } if (jobStatusResult == null || 0 != jobStatusResult.getStatus()) { String reason; if (jobStatusResult == null) { reason = "jobStatusResult is null"; } else { - reason = "server returns non-zero status-code"; + reason = "server returns non-zero status-code. "; + reason += jobStatusResult.getMessage(); } String msg = MessageFormat.format( @@ -255,17 +258,18 @@ private JobInfoResult queryJobInfoInternal(String user, String taskID) while (retryTime++ < MAX_RETRY_TIME) { try { jobInfoResult = client.getJobInfo(executeResult); - logger.debug("job-info: " + Utils.GSON.toJson(jobInfoResult)); + logger.debug("job-info: " + CliUtils.GSON.toJson(jobInfoResult)); if (jobInfoResult == null || 0 != jobInfoResult.getStatus()) { String reason; if (jobInfoResult == null) { reason = "JobInfoResult is null"; } else { - reason = "server returns non-zero status-code"; + reason = "server returns non-zero status-code. 
"; + reason += jobInfoResult.getMessage(); } String msg = MessageFormat.format( - "Get job info failed. retry time : {0}/{1}. taskID={0}, Reason: {1}", + "Get job info failed. retry time : {0}/{1}. taskID={2}, Reason: {3}", retryTime, MAX_RETRY_TIME, taskID, reason); logger.debug( @@ -288,14 +292,15 @@ private JobInfoResult queryJobInfoInternal(String user, String taskID) "EXE0013", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg, e); } } - Utils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); + CliUtils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); } if (jobInfoResult == null || 0 != jobInfoResult.getStatus()) { String reason; if (jobInfoResult == null) { reason = "JobInfoResult is null"; } else { - reason = "server returns non-zero status-code"; + reason = "server returns non-zero status-code. "; + reason += jobInfoResult.getMessage(); } String msg = MessageFormat.format( @@ -321,13 +326,14 @@ public LinkisOperResultAdapter queryRunTimeLogFromLine( while (retryTime++ < MAX_RETRY_TIME) { try { logResult = client.log(jobExecuteResult, fromLine, UJESConstants.MAX_LOG_SIZE); - logger.debug("runtime-log-result:" + Utils.GSON.toJson(logResult)); + logger.debug("runtime-log-result:" + CliUtils.GSON.toJson(logResult)); if (logResult == null || 0 != logResult.getStatus()) { String reason; if (logResult == null) { reason = "JobLogResult is null"; } else { reason = "server returns non-zero status-code"; + reason += logResult.getMessage(); } String msg = MessageFormat.format( @@ -350,14 +356,15 @@ public LinkisOperResultAdapter queryRunTimeLogFromLine( "EXE0016", ErrorLevel.ERROR, CommonErrMsg.ExecutionErr, msg, e); } } - Utils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); + CliUtils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); } if (logResult == null || 0 != logResult.getStatus()) { String reason; if (logResult == null) { reason = "JobLogResult is null"; } else { - reason = "server returns non-zero status-code"; + reason = "server 
returns non-zero status-code. "; + reason += logResult.getMessage(); } String msg = MessageFormat.format( @@ -388,7 +395,7 @@ private OpenLogResult queryPersistedLogInternal(String logPath, String user, Str openLogResult = client.openLog( OpenLogAction.newBuilder().setLogPath(logPath).setProxyUser(user).build()); - logger.debug("persisted-log-result:" + Utils.GSON.toJson(openLogResult)); + logger.debug("persisted-log-result:" + CliUtils.GSON.toJson(openLogResult)); if (openLogResult == null || 0 != openLogResult.getStatus() || StringUtils.isBlank(openLogResult.getLog()[UJESConstants.IDX_FOR_LOG_TYPE_ALL])) { @@ -396,7 +403,8 @@ private OpenLogResult queryPersistedLogInternal(String logPath, String user, Str if (openLogResult == null) { reason = "OpenLogResult is null"; } else if (0 != openLogResult.getStatus()) { - reason = "server returns non-zero status-code"; + reason = "server returns non-zero status-code. "; + reason += openLogResult.getMessage(); } else { reason = "server returns empty log"; } @@ -425,7 +433,7 @@ private OpenLogResult queryPersistedLogInternal(String logPath, String user, Str e); } } - Utils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); + CliUtils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); } if (openLogResult == null || 0 != openLogResult.getStatus() @@ -475,7 +483,8 @@ public UJESResultAdapter queryProgress(String user, String taskID, String execId if (jobProgressResult == null) { reason = "JobProgressResult is null"; } else { - reason = "server returns non-zero status-code"; + reason = "server returns non-zero status-code. 
"; + reason += jobProgressResult.getMessage(); } String msg = MessageFormat.format( @@ -502,7 +511,7 @@ public UJESResultAdapter queryProgress(String user, String taskID, String execId e); } } - Utils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); + CliUtils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); } if (jobProgressResult == null || 0 != jobProgressResult.getStatus()) { @@ -510,7 +519,8 @@ public UJESResultAdapter queryProgress(String user, String taskID, String execId if (jobProgressResult == null) { reason = "JobProgressResult is null"; } else { - reason = "server returns non-zero status-code"; + reason = "server returns non-zero status-code. "; + reason += jobProgressResult.getMessage(); } String msg = MessageFormat.format( @@ -588,7 +598,7 @@ public LinkisOperResultAdapter queryResultSetPaths( e); } } - Utils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); + CliUtils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); } if (resultSetArray == null || 0 == resultSetArray.length) { String reason; @@ -623,13 +633,14 @@ public LinkisOperResultAdapter queryResultSetGivenResultSetPath( .setPageSize(pageSize) .build(); result = client.resultSet(action); - logger.debug("resultset-result:" + Utils.GSON.toJson(result)); + logger.debug("resultset-result:" + CliUtils.GSON.toJson(result)); if (result == null || 0 != result.getStatus()) { String reason; if (result == null) { reason = "array is null"; } else { - reason = "server returns non-zero status-code"; + reason = "server returns non-zero status-code. 
"; + reason += result.getMessage(); } String msg = MessageFormat.format( @@ -656,14 +667,15 @@ public LinkisOperResultAdapter queryResultSetGivenResultSetPath( e); } } - Utils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); + CliUtils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); } if (result == null || 0 != result.getStatus()) { String reason; if (result == null) { reason = "ResultSetResult is null"; } else { - reason = "server returns non-zero status-code"; + reason = "server returns non-zero status-code. "; + reason += result.getMessage(); } String msg = MessageFormat.format( @@ -691,13 +703,14 @@ public LinkisOperResultAdapter kill(String user, String taskId, String execId) killRequest.setTaskID(taskId); killRequest.setExecID(execId); result = client.kill(killRequest); - logger.debug("job-kill-result:" + Utils.GSON.toJson(result)); + logger.debug("job-kill-result:" + CliUtils.GSON.toJson(result)); if (result == null || 0 != result.getStatus()) { String reason; if (result == null) { reason = "result is null"; } else { - reason = "server returns non-zero status-code"; + reason = "server returns non-zero status-code. "; + reason += result.getMessage(); } String msg = MessageFormat.format( @@ -725,14 +738,15 @@ public LinkisOperResultAdapter kill(String user, String taskId, String execId) e); } } - Utils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); + CliUtils.doSleepQuietly(UJESConstants.DRIVER_QUERY_SLEEP_MILLS); } if (result == null || 0 != result.getStatus()) { String reason; if (result == null) { reason = "result is null"; } else { - reason = "server returns non-zero status-code"; + reason = "server returns non-zero status-code. 
"; + reason += result.getMessage(); } String msg = MessageFormat.format( diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisOperatorBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisOperBuilder.java similarity index 60% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisOperatorBuilder.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisOperBuilder.java index 466bb9b7c0..318688c565 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisOperatorBuilder.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisOperBuilder.java @@ -17,28 +17,23 @@ package org.apache.linkis.cli.application.operator.ujes; -import org.apache.linkis.cli.application.constants.AppKeys; -import org.apache.linkis.cli.core.operator.JobOperatorBuilder; +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.entity.context.CliCtx; +import org.apache.linkis.cli.application.entity.operator.JobOper; +import org.apache.linkis.cli.application.operator.JobOperBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class LinkisOperatorBuilder extends JobOperatorBuilder { - private static Logger logger = LoggerFactory.getLogger(LinkisOperatorBuilder.class); +public class LinkisOperBuilder implements JobOperBuilder { + private static Logger logger = LoggerFactory.getLogger(LinkisOperBuilder.class); @Override - public LinkisJobOperator build() { - - ((LinkisJobOperator) targetObj) - 
.setUJESClient(UJESClientFactory.getReusable(stdVarAccess, sysVarAccess)); - ((LinkisJobOperator) targetObj) - .setServerUrl(stdVarAccess.getVar(String.class, AppKeys.LINKIS_COMMON_GATEWAY_URL)); - - return (LinkisJobOperator) super.build(); - } - - @Override - protected LinkisJobOperator getTargetNewInstance() { - return new LinkisJobOperator(); + public JobOper build(CliCtx ctx) { + LinkisJobOper jobOper = new LinkisJobOper(); + jobOper.setUJESClient(UJESClientFactory.getReusable(ctx.getVarAccess())); + jobOper.setServerUrl( + ctx.getVarAccess().getVar(String.class, CliKeys.LINKIS_COMMON_GATEWAY_URL)); + return jobOper; } } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisOperResultAdapter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisOperResultAdapter.java similarity index 90% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisOperResultAdapter.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisOperResultAdapter.java index 457a7ba56d..bd22444a2d 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisOperResultAdapter.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/LinkisOperResultAdapter.java @@ -17,8 +17,8 @@ package org.apache.linkis.cli.application.operator.ujes; -import org.apache.linkis.cli.application.interactor.job.data.LinkisResultSet; -import org.apache.linkis.cli.common.entity.job.JobStatus; +import org.apache.linkis.cli.application.entity.job.JobStatus; +import 
org.apache.linkis.cli.application.interactor.job.common.ResultSet; import java.util.Date; @@ -52,7 +52,7 @@ public interface LinkisOperResultAdapter { Boolean hasNextLogLine(); - LinkisResultSet getResultContent(); + ResultSet getResultContent(); Boolean resultHasNextPage(); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESClientContext.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESClientContext.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESClientContext.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESClientContext.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESClientFactory.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESClientFactory.java similarity index 60% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESClientFactory.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESClientFactory.java index 2767929e8d..f77a909490 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESClientFactory.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESClientFactory.java @@ -17,16 +17,15 @@ package 
org.apache.linkis.cli.application.operator.ujes; -import org.apache.linkis.cli.application.constants.AppKeys; +import org.apache.linkis.cli.application.constants.CliKeys; import org.apache.linkis.cli.application.constants.LinkisConstants; +import org.apache.linkis.cli.application.entity.var.VarAccess; +import org.apache.linkis.cli.application.exception.BuilderException; +import org.apache.linkis.cli.application.exception.LinkisClientExecutionException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; import org.apache.linkis.cli.application.interactor.validate.UJESContextValidator; -import org.apache.linkis.cli.application.utils.Utils; -import org.apache.linkis.cli.common.entity.validate.Validator; -import org.apache.linkis.cli.common.entity.var.VarAccess; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.BuilderException; -import org.apache.linkis.cli.core.exception.LinkisClientExecutionException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.utils.CliUtils; import org.apache.linkis.httpclient.authentication.AuthenticationStrategy; import org.apache.linkis.httpclient.dws.authentication.StaticAuthenticationStrategy; import org.apache.linkis.httpclient.dws.authentication.TokenAuthenticationStrategy; @@ -47,20 +46,20 @@ public class UJESClientFactory { private static UJESClient client; - public static UJESClient getReusable(VarAccess stdVarAccess, VarAccess sysVarAccess) { + public static UJESClient getReusable(VarAccess stdVarAccess) { if (client == null) { synchronized (UJESClientFactory.class) { if (client == null) { - client = getNew(stdVarAccess, sysVarAccess); + client = getNew(stdVarAccess); } } } return client; } - public static UJESClient getNew(VarAccess stdVarAccess, VarAccess sysVarAccess) { + public static UJESClient getNew(VarAccess 
stdVarAccess) { try { - DWSClientConfig config = generateDWSClientConfig(stdVarAccess, sysVarAccess); + DWSClientConfig config = generateDWSClientConfig(stdVarAccess); UJESClient ret = new UJESClientImpl(config); logger.info("Linkis ujes client inited."); return ret; @@ -70,9 +69,8 @@ public static UJESClient getNew(VarAccess stdVarAccess, VarAccess sysVarAccess) } } - public static DWSClientConfig generateDWSClientConfig( - VarAccess stdVarAccess, VarAccess sysVarAccess) { - UJESClientContext context = generateContext(stdVarAccess, sysVarAccess); + public static DWSClientConfig generateDWSClientConfig(VarAccess stdVarAccess) { + UJESClientContext context = generateContext(stdVarAccess); try { AuthenticationStrategy authenticationStrategy; if (StringUtils.isBlank(context.getAuthenticationStrategyStr()) @@ -115,53 +113,8 @@ public static DWSClientConfig generateDWSClientConfig( } } - public static DWSClientConfig generateDWSClientConfigForBML( - VarAccess stdVarAccess, VarAccess sysVarAccess) { - UJESClientContext context = generateContext(stdVarAccess, sysVarAccess); - try { - AuthenticationStrategy authenticationStrategy; - if (StringUtils.isBlank(context.getAuthenticationStrategyStr()) - || !LinkisConstants.AUTH_STRATEGY_TOKEN.equalsIgnoreCase( - context.getAuthenticationStrategyStr())) { - authenticationStrategy = - new StaticAuthenticationStrategy(); // this has to be newed here otherwise - // log-in fails for static - } else { - authenticationStrategy = new TokenAuthenticationStrategy(); - } - - DWSClientConfigBuilder builder = DWSClientConfigBuilder.newBuilder(); - DWSClientConfig config = - ((DWSClientConfigBuilder) - (builder - .addServerUrl(context.getGatewayUrl()) - .connectionTimeout(30000) - .discoveryEnabled(false) - .discoveryFrequency(1, TimeUnit.MINUTES) - .loadbalancerEnabled(false) - .maxConnectionSize(5) - .retryEnabled(false) - .readTimeout(context.getReadTimeoutMills()) - .setAuthenticationStrategy(authenticationStrategy) - 
.setAuthTokenKey("BML-AUTH") - .setAuthTokenValue("BML-AUTH"))) - .setDWSVersion(context.getDwsVersion()) - .build(); - - logger.info("Linkis ujes client inited."); - return config; - } catch (Exception e) { - throw new LinkisClientExecutionException( - "EXE0010", - ErrorLevel.ERROR, - CommonErrMsg.ExecutionInitErr, - "Cannot init DWSClientConfig", - e); - } - } - - private static UJESClientContext generateContext(VarAccess stdVarAccess, VarAccess sysVarAccess) { - String gatewayUrl = stdVarAccess.getVar(String.class, AppKeys.LINKIS_COMMON_GATEWAY_URL); + private static UJESClientContext generateContext(VarAccess stdVarAccess) { + String gatewayUrl = stdVarAccess.getVar(String.class, CliKeys.LINKIS_COMMON_GATEWAY_URL); if (StringUtils.isBlank(gatewayUrl)) { throw new BuilderException( "BLD0007", @@ -170,33 +123,33 @@ private static UJESClientContext generateContext(VarAccess stdVarAccess, VarAcce "Cannot build UjesClientDriverContext: gatewayUrl is empty"); } - String authKey = stdVarAccess.getVar(String.class, AppKeys.LINKIS_COMMON_TOKEN_KEY); - String authValue = stdVarAccess.getVar(String.class, AppKeys.LINKIS_COMMON_TOKEN_VALUE); + String authKey = stdVarAccess.getVar(String.class, CliKeys.LINKIS_COMMON_TOKEN_KEY); + String authValue = stdVarAccess.getVar(String.class, CliKeys.LINKIS_COMMON_TOKEN_VALUE); String authenticationStrategy = stdVarAccess.getVarOrDefault( String.class, - AppKeys.LINKIS_COMMON_AUTHENTICATION_STRATEGY, + CliKeys.LINKIS_COMMON_AUTHENTICATION_STRATEGY, LinkisConstants.AUTH_STRATEGY_STATIC); long connectionTimeout = stdVarAccess.getVarOrDefault( - Long.class, AppKeys.UJESCLIENT_COMMON_CONNECTT_TIMEOUT, 30000L); + Long.class, CliKeys.UJESCLIENT_COMMON_CONNECTT_TIMEOUT, 30000L); boolean discoveryEnabled = stdVarAccess.getVarOrDefault( - Boolean.class, AppKeys.UJESCLIENT_COMMON_DISCOVERY_ENABLED, false); + Boolean.class, CliKeys.UJESCLIENT_COMMON_DISCOVERY_ENABLED, false); boolean loadBalancerEnabled = stdVarAccess.getVarOrDefault( - 
Boolean.class, AppKeys.UJESCLIENT_COMMON_LOADBALANCER_ENABLED, true); + Boolean.class, CliKeys.UJESCLIENT_COMMON_LOADBALANCER_ENABLED, true); int maxConnectionSize = stdVarAccess.getVarOrDefault( - Integer.class, AppKeys.UJESCLIENT_COMMON_MAX_CONNECTION_SIZE, 5); + Integer.class, CliKeys.UJESCLIENT_COMMON_MAX_CONNECTION_SIZE, 5); boolean retryEnabled = - stdVarAccess.getVarOrDefault(Boolean.class, AppKeys.UJESCLIENT_COMMON_RETRY_ENABLED, false); + stdVarAccess.getVarOrDefault(Boolean.class, CliKeys.UJESCLIENT_COMMON_RETRY_ENABLED, false); long readTimeout = - stdVarAccess.getVarOrDefault(Long.class, AppKeys.UJESCLIENT_COMMON_READTIMEOUT, 30000L); + stdVarAccess.getVarOrDefault(Long.class, CliKeys.UJESCLIENT_COMMON_READTIMEOUT, 30000L); String dwsVersion = - stdVarAccess.getVarOrDefault(String.class, AppKeys.UJESCLIENT_COMMON_DWS_VERSION, "v1"); + stdVarAccess.getVarOrDefault(String.class, CliKeys.UJESCLIENT_COMMON_DWS_VERSION, "v1"); UJESClientContext context = new UJESClientContext(); @@ -212,8 +165,8 @@ private static UJESClientContext generateContext(VarAccess stdVarAccess, VarAcce context.setReadTimeoutMills(readTimeout); context.setDwsVersion(dwsVersion); - logger.info("==========UJES_CTX============\n" + Utils.GSON.toJson(context)); - Validator ctxValidator = new UJESContextValidator(); + logger.info("==========UJES_CTX============\n" + CliUtils.GSON.toJson(context)); + UJESContextValidator ctxValidator = new UJESContextValidator(); ctxValidator.doValidation(context); return context; } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESConstants.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESConstants.java similarity index 93% rename from 
linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESConstants.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESConstants.java index 845949079f..6166f7b2fe 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESConstants.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESConstants.java @@ -40,7 +40,4 @@ public class UJESConstants { public static final Integer IDX_FOR_LOG_TYPE_ALL = 3; // 0: Error 1: WARN 2:INFO 3: ALL public static final int DEFAULT_PAGE_SIZE = 500; - - public static final String DEFAULT_SPARK_ENGINE = "spark-2.4.3"; - public static final String DEFAULT_HIVE_ENGINE = "hive-1.2.1"; } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESResultAdapter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESResultAdapter.java similarity index 90% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESResultAdapter.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESResultAdapter.java index d402e01fe8..6c746ff57c 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESResultAdapter.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/UJESResultAdapter.java @@ -18,17 +18,16 
@@ package org.apache.linkis.cli.application.operator.ujes; import org.apache.linkis.cli.application.constants.LinkisKeys; -import org.apache.linkis.cli.application.interactor.job.LinkisJobStatus; -import org.apache.linkis.cli.application.interactor.job.data.LinkisResultSet; -import org.apache.linkis.cli.application.operator.OperatorUtils; +import org.apache.linkis.cli.application.entity.job.JobStatus; +import org.apache.linkis.cli.application.exception.TransformerException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.job.common.LinkisJobStatus; +import org.apache.linkis.cli.application.interactor.job.common.ResultSet; import org.apache.linkis.cli.application.operator.ujes.result.OpenLogResult2; import org.apache.linkis.cli.application.operator.ujes.result.ResultSetResult2; import org.apache.linkis.cli.application.operator.ujes.result.UJESResult; -import org.apache.linkis.cli.application.utils.Utils; -import org.apache.linkis.cli.common.entity.job.JobStatus; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.TransformerException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.utils.CliUtils; import org.apache.linkis.httpclient.dws.response.DWSResult; import org.apache.linkis.ujes.client.request.UserAction; import org.apache.linkis.ujes.client.response.JobInfoResult; @@ -170,7 +169,7 @@ public String getStrongerExecId() { execId = (String) ((JobInfoResult) result).getTask().get(LinkisKeys.KEY_STRONGER_EXECID); } } - if (Utils.isValidExecId(execId)) { + if (CliUtils.isValidExecId(execId)) { return execId; } return null; @@ -277,8 +276,7 @@ public String getLog() { ((OpenLogResult2) result).getResult().getLog()[UJESConstants.IDX_FOR_LOG_TYPE_ALL]; Integer fromLine = ((OpenLogResult2) 
result).getFromLine(); return StringUtils.substring( - allLog, - OperatorUtils.getFirstIndexSkippingLines(allLog, fromLine == null ? 0 : fromLine)); + allLog, getFirstIndexSkippingLines(allLog, fromLine == null ? 0 : fromLine)); } return null; } @@ -294,7 +292,7 @@ public Integer getNextLogLine() { if (result instanceof OpenLogResult2 && ((OpenLogResult2) result).getResult() != null && ((OpenLogResult2) result).getResult().getLog() != null) { - return OperatorUtils.getNumOfLines( + return getNumOfLines( ((OpenLogResult2) result).getResult().getLog()[UJESConstants.IDX_FOR_LOG_TYPE_ALL]); } return null; @@ -315,7 +313,7 @@ public Boolean hasNextLogLine() { } @Override - public LinkisResultSet getResultContent() { + public ResultSet getResultContent() { if (result == null) { return null; } @@ -323,7 +321,7 @@ public LinkisResultSet getResultContent() { && ((ResultSetResult2) result).getResultSetResult() != null && ((ResultSetResult2) result).getResultSetResult().getFileContent() != null && ((ResultSetResult2) result).getResultSetResult().getMetadata() != null) { - LinkisResultSet ret = new LinkisResultSet(); + ResultSet ret = new ResultSet(); ret.setResultsetIdx(((ResultSetResult2) result).getIdxResultSet()); if (((ResultSetResult2) result).getResultSetResult().getMetadata() != null) { ret.setResultMeta( @@ -560,4 +558,54 @@ public String getRequestApplicationName() { } return null; } + + private int getNumOfLines(String str) { + if (str == null || str.length() == 0) { + return 0; + } + int lines = 1; + int len = str.length(); + for (int pos = 0; pos < len; pos++) { + char c = str.charAt(pos); + if (c == '\r') { + lines++; + if (pos + 1 < len && str.charAt(pos + 1) == '\n') { + pos++; + } + } else if (c == '\n') { + lines++; + } + } + return lines; + } + + private int getFirstIndexSkippingLines(String str, Integer lines) { + if (str == null || str.length() == 0 || lines < 0) { + return -1; + } + if (lines == 0) { + return 0; + } + + int curLineIdx = 0; + int len = 
str.length(); + for (int pos = 0; pos < len; pos++) { + char c = str.charAt(pos); + if (c == '\r') { + curLineIdx++; + if (pos + 1 < len && str.charAt(pos + 1) == '\n') { + pos++; + } + } else if (c == '\n') { + curLineIdx++; + } else { + continue; + } + + if (curLineIdx >= lines) { + return pos + 1; + } + } + return -1; + } } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/result/OpenLogResult2.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/result/OpenLogResult2.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/result/OpenLogResult2.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/result/OpenLogResult2.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/result/ResultSetResult2.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/result/ResultSetResult2.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/result/ResultSetResult2.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/result/ResultSetResult2.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/result/UJESResult.java 
b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/result/UJESResult.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/operator/ujes/result/UJESResult.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/operator/ujes/result/UJESResult.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/HelpInfoPresenter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/HelpPresenter.java similarity index 65% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/HelpInfoPresenter.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/HelpPresenter.java index 186081a560..f02be7fefc 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/HelpInfoPresenter.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/HelpPresenter.java @@ -15,23 +15,20 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.present; +package org.apache.linkis.cli.application.present; -import org.apache.linkis.cli.common.entity.command.CmdOption; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.entity.present.PresentWay; -import org.apache.linkis.cli.common.entity.present.Presenter; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.PresenterException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.interactor.command.template.option.MapOption; -import org.apache.linkis.cli.core.interactor.command.template.option.Parameter; -import org.apache.linkis.cli.core.interactor.command.template.option.StdOption; -import org.apache.linkis.cli.core.present.display.DisplayOperator; -import org.apache.linkis.cli.core.present.display.StdOutWriter; -import org.apache.linkis.cli.core.present.display.data.StdoutDisplayData; -import org.apache.linkis.cli.core.present.model.HelpInfoModel; +import org.apache.linkis.cli.application.entity.command.CmdOption; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.present.Model; +import org.apache.linkis.cli.application.entity.present.Presenter; +import org.apache.linkis.cli.application.exception.PresenterException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.command.template.option.MapOption; +import org.apache.linkis.cli.application.interactor.command.template.option.Parameter; +import org.apache.linkis.cli.application.interactor.command.template.option.StdOption; +import org.apache.linkis.cli.application.present.model.HelpInfoModel; +import 
org.apache.linkis.cli.application.utils.LoggerManager; import java.util.ArrayList; import java.util.List; @@ -39,12 +36,11 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class HelpInfoPresenter implements Presenter { - private static Logger logger = LoggerFactory.getLogger(HelpInfoPresenter.class); - DisplayOperator driver = new StdOutWriter(); +public class HelpPresenter implements Presenter { + private static Logger logger = LoggerFactory.getLogger(HelpPresenter.class); @Override - public void present(Model model, PresentWay presentWay) { + public void present(Model model) { if (!(model instanceof HelpInfoModel)) { throw new PresenterException( "PST0010", @@ -52,15 +48,12 @@ public void present(Model model, PresentWay presentWay) { CommonErrMsg.PresenterErr, "Input for HelpInfoPresenter is not instance of model"); } - if (driver == null) { - throw new PresenterException( - "PST0007", ErrorLevel.ERROR, CommonErrMsg.PresenterErr, "Driver is null"); - } + HelpInfoModel helpInfoModel = (HelpInfoModel) model; String helpInfo = getUsage(helpInfoModel.getTemplate()); - driver.doOutput(new StdoutDisplayData(helpInfo)); + LoggerManager.getPlaintTextLogger().info(helpInfo); } /** Help info for sub-command */ diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/validate/PropsValidator.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/JobInfoPresenter.java similarity index 63% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/validate/PropsValidator.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/JobInfoPresenter.java index 6231f4eb88..3a264f827c 100644 --- 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/validate/PropsValidator.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/JobInfoPresenter.java @@ -15,17 +15,16 @@ * limitations under the License. */ -package org.apache.linkis.cli.core.interactor.validate; +package org.apache.linkis.cli.application.present; -import org.apache.linkis.cli.common.entity.properties.ClientProperties; -import org.apache.linkis.cli.common.entity.validate.Validator; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; +import org.apache.linkis.cli.application.entity.present.Model; +import org.apache.linkis.cli.application.entity.present.Presenter; +import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.cli.application.utils.LoggerManager; -public class PropsValidator implements Validator { +public class JobInfoPresenter implements Presenter { @Override - public void doValidation(Object input) throws LinkisClientRuntimeException { - if (!(input instanceof ClientProperties)) { - // TODO:throw - } + public void present(Model model) { + LoggerManager.getPlaintTextLogger().info(CliUtils.GSON.toJson(model)); } } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/LinkisLogPresenter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/LogPresenter.java similarity index 61% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/LinkisLogPresenter.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/LogPresenter.java index 291d7314db..9710be95db 100644 --- 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/LinkisLogPresenter.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/LogPresenter.java @@ -17,31 +17,34 @@ package org.apache.linkis.cli.application.present; +import org.apache.linkis.cli.application.entity.present.Model; +import org.apache.linkis.cli.application.entity.present.Presenter; +import org.apache.linkis.cli.application.exception.PresenterException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; import org.apache.linkis.cli.application.observer.event.LinkisClientEvent; import org.apache.linkis.cli.application.observer.listener.LinkisClientListener; import org.apache.linkis.cli.application.present.model.LinkisLogModel; -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.entity.present.PresentWay; -import org.apache.linkis.cli.common.entity.present.Presenter; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.PresenterException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.present.PresentModeImpl; -import org.apache.linkis.cli.core.present.display.DisplayOperFactory; -import org.apache.linkis.cli.core.present.display.DisplayOperator; -import org.apache.linkis.cli.core.present.display.data.StdoutDisplayData; -import org.apache.linkis.cli.core.utils.CommonUtils; +import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.cli.application.utils.LoggerManager; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class LinkisLogPresenter implements Presenter, LinkisClientListener { - private static Logger logger = 
LoggerFactory.getLogger(LinkisLogPresenter.class); +public class LogPresenter implements Presenter, LinkisClientListener { + private static Logger logger = LoggerFactory.getLogger(LogPresenter.class); @Override - public void present(Model model, PresentWay presentWay) { + public void update(LinkisClientEvent event, Object msg) { + Model model = new LinkisLogModel(); + model.buildModel(msg); + this.present(model); + } + + @Override + public void present(Model model) { if (!(model instanceof LinkisLogModel)) { throw new PresenterException( "PST0001", @@ -50,24 +53,16 @@ public void present(Model model, PresentWay presentWay) { "Input model for \"LinkisLogPresenter\" is not instance of \"LinkisJobIncLogModel\""); } LinkisLogModel logModel = (LinkisLogModel) model; - DisplayOperator displayOper = DisplayOperFactory.getDisplayOper(PresentModeImpl.STDOUT); while (!logModel.logFinReceived()) { String incLog = logModel.consumeLog(); if (StringUtils.isNotEmpty(incLog)) { - displayOper.doOutput(new StdoutDisplayData(incLog)); + LoggerManager.getPlaintTextLogger().info(incLog); } - CommonUtils.doSleepQuietly(500l); + CliUtils.doSleepQuietly(500l); } String incLog = logModel.consumeLog(); if (StringUtils.isNotEmpty(incLog)) { - displayOper.doOutput(new StdoutDisplayData(incLog)); + LoggerManager.getPlaintTextLogger().info(incLog); } } - - @Override - public void update(LinkisClientEvent event, Object msg) { - Model model = new LinkisLogModel(); - model.buildModel(msg); - this.present(model, null); - } } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/ResultPresenter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/ResultPresenter.java new file mode 100644 index 0000000000..3c7281a7eb --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/ResultPresenter.java 
@@ -0,0 +1,301 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.present; + +import org.apache.linkis.cli.application.constants.CliConstants; +import org.apache.linkis.cli.application.entity.present.Model; +import org.apache.linkis.cli.application.entity.present.Presenter; +import org.apache.linkis.cli.application.exception.PresenterException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.job.common.ResultSet; +import org.apache.linkis.cli.application.observer.event.LinkisClientEvent; +import org.apache.linkis.cli.application.observer.listener.LinkisClientListener; +import org.apache.linkis.cli.application.present.file.ResultFileWriter; +import org.apache.linkis.cli.application.present.model.LinkisResultModel; +import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.cli.application.utils.LoggerManager; + +import org.apache.commons.lang3.StringUtils; + +import java.text.MessageFormat; +import java.util.*; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class ResultPresenter 
implements Presenter, LinkisClientListener { + private static Logger logger = LoggerFactory.getLogger(ResultPresenter.class); + private Boolean writeToFile = false; + private String filePath = ""; + + public ResultPresenter() {} + + public ResultPresenter(Boolean writeToFile, String filePath) { + this.writeToFile = writeToFile; + this.filePath = filePath; + } + + @Override + public void update(LinkisClientEvent event, Object msg) { + Model model = new LinkisResultModel(); + model.buildModel(msg); + this.present(model); + } + + @Override + public void present(Model model) { + if (!(model instanceof LinkisResultModel)) { + throw new PresenterException( + "PST0001", + ErrorLevel.ERROR, + CommonErrMsg.PresenterErr, + "Input model for \"LinkisResultPresenter\" is not instance of \"LinkisResultModel\""); + } + LinkisResultModel resultModel = (LinkisResultModel) model; + + LoggerManager.getPlaintTextLogger().info(formatResultIndicator(resultModel)); + + if (!resultModel.getJobStatus().isJobSuccess()) { + LoggerManager.getInformationLogger() + .info("JobStatus is not \'success\'. 
Will not retrieve result-set."); + return; + } + String msg = ""; + if (resultModel.hasResult()) { + msg = + "Retrieving result-set, may take time if result-set is large, please do not exit program."; + } else { + msg = "Your job has no result."; + } + LoggerManager.getInformationLogger().info(msg); + + int preIdx = -1; + StringBuilder resultSb = new StringBuilder(); + + while (!resultModel.resultFinReceived()) { + preIdx = presentOneIteration(resultModel, preIdx, resultSb); + CliUtils.doSleepQuietly(500l); + } + presentOneIteration(resultModel, preIdx, resultSb); + + if (writeToFile) { + LoggerManager.getInformationLogger() + .info("ResultSet has been successfully written to path: " + filePath); + } + } + + protected int presentOneIteration( + LinkisResultModel resultModel, int preIdx, StringBuilder resultSb) { + List resultSets = resultModel.consumeResultContent(); + if (resultSets != null && !resultSets.isEmpty()) { + for (ResultSet c : resultSets) { + int idxResultset = c.getResultsetIdx(); + /** + * Notice: we assume result-sets are visited one by one in non-descending order!!! i.e. + * either idxResultset == preIdx or idxResultset - preIdx == 1 i.e. resultsets[0] -> + * resultsets[1] -> ... 
+ */ + if (idxResultset - preIdx != 0 && idxResultset - preIdx != 1) { + throw new PresenterException( + "PST0002", + ErrorLevel.ERROR, + CommonErrMsg.PresenterErr, + "Linkis resultsets are visited in descending order or are not visited one-by-one"); + } + + boolean flag = idxResultset > preIdx; + if (idxResultset - preIdx == 1) { + resultSb.setLength(0); + resultSb + .append(MessageFormat.format(CliConstants.RESULTSET_LOGO, idxResultset + 1)) + .append(System.lineSeparator()); + if (c.getResultMeta() != null) { + resultSb.append(CliConstants.RESULTSET_META_BEGIN_LOGO).append(System.lineSeparator()); + resultSb.append(formatResultMeta(c.getResultMeta())); + resultSb.append(CliConstants.RESULTSET_META_END_LOGO).append(System.lineSeparator()); + } + } + preIdx = idxResultset; + String contentStr = formatResultContent(c.getResultMeta(), c.getContent()); + if (contentStr != null) { + resultSb.append(contentStr); + } + if (resultSb.length() != 0) { + if (writeToFile) { + String resultFileName = + resultModel.getUser() + + "-task-" + + resultModel.getJobID() + + "-result-" + + String.valueOf(idxResultset + 1) + + ".txt"; + ResultFileWriter.writeToFile(filePath, resultFileName, resultSb.toString(), flag); + } else { + LoggerManager.getPlaintTextLogger().info(resultSb.toString()); + } + resultSb.setLength(0); + } + } + } + return preIdx; + } + + protected String formatResultMeta(List> metaData) { + + StringBuilder outputBuilder = new StringBuilder(); + + if (metaData == null || metaData.size() == 0) { + return null; + } + + List titles = new ArrayList<>(); + + // gather keys as title + for (LinkedHashMap mapElement : metaData) { + if (mapElement == null || mapElement.size() == 0) { + continue; + } + + Set> entrySet = mapElement.entrySet(); + if (entrySet == null) { + break; + } + for (Map.Entry entry : entrySet) { + String key = entry.getKey(); + if (key != null && !titles.contains(key)) { + titles.add(key); + outputBuilder.append(key).append("\t"); + } + } + } + + 
outputBuilder.append(System.lineSeparator()); + + // gather value and print to output + for (LinkedHashMap mapElement : metaData) { + if (mapElement == null || mapElement.size() == 0) { + continue; + } + String candidate; + for (String title : titles) { + if (mapElement.containsKey(title)) { + candidate = mapElement.get(title); + } else { + candidate = "NULL"; + } + outputBuilder.append(candidate).append("\t"); + } + outputBuilder.append(System.lineSeparator()); + } + return outputBuilder.toString(); + } + + protected String formatResultContent( + List> metaData, List> contentData) { + + StringBuilder outputBuilder = new StringBuilder(); + if (contentData == null || contentData.size() == 0) { // finished + return null; + } + + int listLen = contentData.size(); + for (int i = 0; i < listLen; i++) { + List listElement = contentData.get(i); + if (listElement == null || listElement.size() == 0) { + continue; + } + for (String element : listElement) { + outputBuilder.append(element).append("\t"); + } + if (i < listLen - 1) { + outputBuilder.append(System.lineSeparator()); + } + } + + return outputBuilder.toString(); + } + + protected String formatResultIndicator(LinkisResultModel model) { + StringBuilder infoBuilder = new StringBuilder(); + String extraMsgStr = ""; + + if (model.getExtraMessage() != null) { + extraMsgStr = model.getExtraMessage(); + } + if (model.getJobStatus().isJobSuccess()) { + + LoggerManager.getInformationLogger() + .info("Job execute successfully! 
Will try get execute result"); + infoBuilder + .append("============Result:================") + .append(System.lineSeparator()) + .append("TaskId:") + .append(model.getJobID()) + .append(System.lineSeparator()) + .append("ExecId: ") + .append(model.getExecID()) + .append(System.lineSeparator()) + .append("User:") + .append(model.getUser()) + .append(System.lineSeparator()) + .append("Current job status:") + .append(model.getJobStatus()) + .append(System.lineSeparator()) + .append("extraMsg: ") + .append(extraMsgStr) + .append(System.lineSeparator()) + .append("result: ") + .append(extraMsgStr) + .append(System.lineSeparator()); + } else if (model.getJobStatus().isJobFinishedState()) { + LoggerManager.getInformationLogger().info("Job failed! Will not try get execute result."); + infoBuilder + .append("============Result:================") + .append(System.lineSeparator()) + .append("TaskId:") + .append(model.getJobID()) + .append(System.lineSeparator()) + .append("ExecId: ") + .append(model.getExecID()) + .append(System.lineSeparator()) + .append("User:") + .append(model.getUser()) + .append(System.lineSeparator()) + .append("Current job status:") + .append(model.getJobStatus()) + .append(System.lineSeparator()) + .append("extraMsg: ") + .append(extraMsgStr) + .append(System.lineSeparator()); + if (model.getErrCode() != null) { + infoBuilder.append("errCode: ").append(model.getErrCode()).append(System.lineSeparator()); + } + if (StringUtils.isNotBlank(model.getErrDesc())) { + infoBuilder.append("errDesc: ").append(model.getErrDesc()).append(System.lineSeparator()); + } + } else { + throw new PresenterException( + "PST0011", + ErrorLevel.ERROR, + CommonErrMsg.PresenterErr, + "Job is not completed but triggered ResultPresenter"); + } + return infoBuilder.toString(); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/PlainTextFileWriter.java 
b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/file/ResultFileWriter.java similarity index 72% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/PlainTextFileWriter.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/file/ResultFileWriter.java index 789b344501..9a54699165 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/PlainTextFileWriter.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/file/ResultFileWriter.java @@ -15,34 +15,21 @@ * limitations under the License. */ -package org.apache.linkis.cli.core.present.display; +package org.apache.linkis.cli.application.present.file; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.PresenterException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.present.display.data.DisplayData; -import org.apache.linkis.cli.core.present.display.data.FileDisplayData; +import org.apache.linkis.cli.application.exception.PresenterException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; import java.io.BufferedWriter; import java.io.File; import java.io.FileOutputStream; import java.io.OutputStreamWriter; -public class PlainTextFileWriter implements DisplayOperator { - @Override - public void doOutput(DisplayData data) { - if (!(data instanceof FileDisplayData)) { - throw new PresenterException( - "PST0004", - ErrorLevel.ERROR, - CommonErrMsg.PresentDriverErr, - "input data is not instance of FileDisplayData"); - } +public class ResultFileWriter { 
- String pathName = ((FileDisplayData) data).getPathName(); - String fileName = pathName + File.separator + ((FileDisplayData) data).getFileName(); - String content = ((FileDisplayData) data).getContent(); - Boolean overWrite = ((FileDisplayData) data).getCreateNewFile(); + public static void writeToFile( + String pathName, String fileName, String content, Boolean overWrite) { File dir = new File(pathName); File file = new File(fileName); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/model/HelpInfoModel.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/model/HelpInfoModel.java similarity index 77% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/model/HelpInfoModel.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/model/HelpInfoModel.java index c2b5f5e772..46dcec2d5e 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/model/HelpInfoModel.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/model/HelpInfoModel.java @@ -15,13 +15,13 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.present.model; +package org.apache.linkis.cli.application.present.model; -import org.apache.linkis.cli.common.entity.command.CmdTemplate; -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.TransformerException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.entity.command.CmdTemplate; +import org.apache.linkis.cli.application.entity.present.Model; +import org.apache.linkis.cli.application.exception.TransformerException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; public class HelpInfoModel implements Model { CmdTemplate template; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/model/LinkisJobInfoModel.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/model/LinkisJobInfoModel.java new file mode 100644 index 0000000000..b428a788dd --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/model/LinkisJobInfoModel.java @@ -0,0 +1,89 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cli.application.present.model; + +import org.apache.linkis.cli.application.entity.job.JobStatus; +import org.apache.linkis.cli.application.entity.present.Model; +import org.apache.linkis.cli.application.exception.TransformerException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.operator.ujes.LinkisOperResultAdapter; + +import java.util.Date; + +public class LinkisJobInfoModel implements Model { + + private String cid; + private String jobId; + private String message; + private String exception; + private String cause; + + private String taskID; + private String instance; + private String simpleExecId; + private String execId; + private String umUser; + private String executionCode; + private String logPath; + private JobStatus status; + private String engineType; + private String runType; + private Long costTime; + private Date createdTime; + private Date updatedTime; + private Date engineStartTime; + private Integer errCode; + private String errMsg; + private String executeApplicationName; + private String requestApplicationName; + private Float progress; + + @Override + public void buildModel(Object data) { + if (!(data instanceof LinkisOperResultAdapter)) { + throw new TransformerException( + "TFM0010", + ErrorLevel.ERROR, + CommonErrMsg.TransformerException, + "Failed to init LinkisJobInfoModel: " + + data.getClass().getCanonicalName() + + "is not instance of 
\"LinkisOperResultAdapter\""); + } + this.jobId = ((LinkisOperResultAdapter) data).getJobID(); + this.taskID = ((LinkisOperResultAdapter) data).getJobID(); + this.instance = ((LinkisOperResultAdapter) data).getInstance(); + this.simpleExecId = ((LinkisOperResultAdapter) data).getSimpleExecId(); + this.execId = ((LinkisOperResultAdapter) data).getStrongerExecId(); + this.umUser = ((LinkisOperResultAdapter) data).getUmUser(); + this.executionCode = ((LinkisOperResultAdapter) data).getExecutionCode(); + this.logPath = ((LinkisOperResultAdapter) data).getLogPath(); + this.status = ((LinkisOperResultAdapter) data).getJobStatus(); + this.engineType = ((LinkisOperResultAdapter) data).getEngineType(); + this.runType = ((LinkisOperResultAdapter) data).getRunType(); + this.costTime = ((LinkisOperResultAdapter) data).getCostTime(); + this.createdTime = ((LinkisOperResultAdapter) data).getCreatedTime(); + this.updatedTime = ((LinkisOperResultAdapter) data).getUpdatedTime(); + this.engineStartTime = ((LinkisOperResultAdapter) data).getEngineStartTime(); + this.errCode = ((LinkisOperResultAdapter) data).getErrCode(); + this.errMsg = ((LinkisOperResultAdapter) data).getErrDesc(); + this.executeApplicationName = ((LinkisOperResultAdapter) data).getExecuteApplicationName(); + this.requestApplicationName = ((LinkisOperResultAdapter) data).getRequestApplicationName(); + this.progress = ((LinkisOperResultAdapter) data).getJobProgress(); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisLogModel.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/model/LinkisLogModel.java similarity index 74% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisLogModel.java rename to 
linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/model/LinkisLogModel.java index 503c778339..e580547887 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisLogModel.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/model/LinkisLogModel.java @@ -17,18 +17,18 @@ package org.apache.linkis.cli.application.present.model; -import org.apache.linkis.cli.application.interactor.job.data.LinkisLogData; -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.TransformerException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.entity.present.Model; +import org.apache.linkis.cli.application.exception.TransformerException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.job.common.LogData; public class LinkisLogModel implements Model { - private LinkisLogData data; + private LogData data; @Override public void buildModel(Object data) { - if (!(data instanceof LinkisLogData)) { + if (!(data instanceof LogData)) { throw new TransformerException( "TFM0010", ErrorLevel.ERROR, @@ -37,7 +37,7 @@ public void buildModel(Object data) { + data.getClass().getCanonicalName() + "is not instance of \"LinkisLogData\""); } - this.data = (LinkisLogData) data; + this.data = (LogData) data; } public String consumeLog() { @@ -45,6 +45,6 @@ public String consumeLog() { } public boolean logFinReceived() { - return data.logFinReceived(); + return data.isLogFin(); } } diff --git 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisResultModel.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/model/LinkisResultModel.java similarity index 63% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisResultModel.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/model/LinkisResultModel.java index 1dce057c3e..fd9e8eb9a2 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/present/model/LinkisResultModel.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/model/LinkisResultModel.java @@ -17,22 +17,22 @@ package org.apache.linkis.cli.application.present.model; -import org.apache.linkis.cli.application.interactor.job.data.LinkisResultData; -import org.apache.linkis.cli.application.interactor.job.data.LinkisResultSet; -import org.apache.linkis.cli.common.entity.job.JobStatus; -import org.apache.linkis.cli.common.entity.present.Model; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.TransformerException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.entity.job.JobStatus; +import org.apache.linkis.cli.application.entity.present.Model; +import org.apache.linkis.cli.application.exception.TransformerException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; +import org.apache.linkis.cli.application.interactor.job.common.ResultData; +import 
org.apache.linkis.cli.application.interactor.job.common.ResultSet; import java.util.List; public class LinkisResultModel implements Model { - private LinkisResultData data; + private ResultData data; @Override public void buildModel(Object data) { - if (!(data instanceof LinkisResultData)) { + if (!(data instanceof ResultData)) { throw new TransformerException( "TFM0010", ErrorLevel.ERROR, @@ -41,15 +41,15 @@ public void buildModel(Object data) { + data.getClass().getCanonicalName() + "is not instance of \"LinkisResultData\""); } - this.data = (LinkisResultData) data; + this.data = (ResultData) data; } - public List consumeResultContent() { + public List consumeResultContent() { return data.consumeResultContent(); } public boolean resultFinReceived() { - return data.resultFinReceived(); + return data.isResultFin(); } public JobStatus getJobStatus() { @@ -67,4 +67,20 @@ public String getUser() { public boolean hasResult() { return data.hasResult(); } + + public String getExecID() { + return data.getExecID(); + } + + public Integer getErrCode() { + return data.getErrCode(); + } + + public String getErrDesc() { + return data.getErrDesc(); + } + + public String getExtraMessage() { + return data.getExtraMessage(); + } } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/utils/ExecutionUtils.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/utils/CliUtils.java similarity index 65% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/utils/ExecutionUtils.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/utils/CliUtils.java index 82a3d8bff2..3f8d86d48e 100644 --- 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/utils/ExecutionUtils.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/utils/CliUtils.java @@ -17,31 +17,38 @@ package org.apache.linkis.cli.application.utils; -import org.apache.linkis.cli.application.constants.AppKeys; +import org.apache.linkis.cli.application.constants.CliKeys; import org.apache.linkis.cli.application.constants.LinkisConstants; -import org.apache.linkis.cli.common.entity.var.VarAccess; -import org.apache.linkis.cli.common.exception.error.ErrorLevel; -import org.apache.linkis.cli.core.exception.BuilderException; -import org.apache.linkis.cli.core.exception.error.CommonErrMsg; -import org.apache.linkis.cli.core.utils.LogUtils; +import org.apache.linkis.cli.application.entity.var.VarAccess; +import org.apache.linkis.cli.application.exception.BuilderException; +import org.apache.linkis.cli.application.exception.error.CommonErrMsg; +import org.apache.linkis.cli.application.exception.error.ErrorLevel; import org.apache.commons.lang3.StringUtils; import java.io.*; +import java.util.HashMap; +import java.util.Map; import java.util.Set; -public class ExecutionUtils { +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; + +public class CliUtils { + public static final Gson GSON = + new GsonBuilder().setPrettyPrinting().disableHtmlEscaping().create(); + public static String getSubmitUser(VarAccess stdVarAccess, String osUser, Set adminSet) { String enableSpecifyUserStr = stdVarAccess.getVar( - String.class, AppKeys.LINKIS_CLIENT_NONCUSTOMIZABLE_ENABLE_USER_SPECIFICATION); + String.class, CliKeys.LINKIS_CLIENT_NONCUSTOMIZABLE_ENABLE_USER_SPECIFICATION); Boolean enableSpecifyUser = Boolean.parseBoolean(enableSpecifyUserStr) || adminSet.contains(osUser); String authenticationStrategy = stdVarAccess.getVarOrDefault( String.class, - 
AppKeys.LINKIS_COMMON_AUTHENTICATION_STRATEGY, + CliKeys.LINKIS_COMMON_AUTHENTICATION_STRATEGY, LinkisConstants.AUTH_STRATEGY_STATIC); String submitUsr; @@ -53,8 +60,8 @@ public static String getSubmitUser(VarAccess stdVarAccess, String osUser, Set adminSet) { String enableSpecifyPRoxyUserStr = - stdVarAccess.getVar(String.class, AppKeys.LINKIS_CLIENT_NONCUSTOMIZABLE_ENABLE_PROXY_USER); + stdVarAccess.getVar(String.class, CliKeys.LINKIS_CLIENT_NONCUSTOMIZABLE_ENABLE_PROXY_USER); Boolean enableSpecifyProxyUser = Boolean.parseBoolean(enableSpecifyPRoxyUserStr) || adminSet.contains(submitUsr); @@ -129,8 +136,8 @@ public static String getProxyUser( String proxyUsr; if (enableSpecifyProxyUser) { - if (stdVarAccess.hasVar(AppKeys.JOB_COMMON_PROXY_USER)) { - proxyUsr = stdVarAccess.getVar(String.class, AppKeys.JOB_COMMON_PROXY_USER); + if (stdVarAccess.hasVar(CliKeys.JOB_COMMON_PROXY_USER)) { + proxyUsr = stdVarAccess.getVar(String.class, CliKeys.JOB_COMMON_PROXY_USER); if (!adminSet.contains(submitUsr) && adminSet.contains(proxyUsr)) { throw new BuilderException( "BLD0010", @@ -140,14 +147,14 @@ public static String getProxyUser( } } else { proxyUsr = submitUsr; - LogUtils.getInformationLogger() + LoggerManager.getInformationLogger() .info( "user does not specify proxy-user, will use current submit-user \"" + submitUsr + "\" by default."); } - } else if (stdVarAccess.hasVar(AppKeys.JOB_COMMON_PROXY_USER)) { - proxyUsr = stdVarAccess.getVar(String.class, AppKeys.JOB_COMMON_PROXY_USER); + } else if (stdVarAccess.hasVar(CliKeys.JOB_COMMON_PROXY_USER)) { + proxyUsr = stdVarAccess.getVar(String.class, CliKeys.JOB_COMMON_PROXY_USER); if (!StringUtils.equals(proxyUsr, submitUsr)) { throw new BuilderException( "BLD0010", @@ -157,7 +164,7 @@ public static String getProxyUser( } } else { proxyUsr = submitUsr; - LogUtils.getInformationLogger() + LoggerManager.getInformationLogger() .info( "user does not specify proxy-user, will use current submit-user \"" + proxyUsr @@ -199,4 
+206,66 @@ public static String readFile(String path) { e); } } + + public static T castStringToAny(Class clazz, String val) { + if (StringUtils.isBlank(val)) { + return null; + } + T ret = null; + if (clazz == Object.class) { + ret = clazz.cast(val); + } else if (clazz == String.class) { + ret = clazz.cast(val); + } else if (clazz == Integer.class) { + ret = clazz.cast(Integer.parseInt(val)); + } else if (clazz == Double.class) { + ret = clazz.cast(Double.parseDouble(val)); + } else if (clazz == Float.class) { + ret = clazz.cast(Float.parseFloat(val)); + } else if (clazz == Long.class) { + ret = clazz.cast(Long.parseLong(val)); + } else if (clazz == Boolean.class) { + ret = clazz.cast(Boolean.parseBoolean(val)); + } + return ret; + } + + public static Map parseKVStringToMap(String kvStr, String separator) { + if (StringUtils.isBlank(separator)) { + separator = ","; + } + if (StringUtils.isBlank(kvStr)) { + return null; + } + Map argsProps = new HashMap<>(); + String[] args = StringUtils.splitByWholeSeparator(kvStr, separator); + for (String arg : args) { + int index = arg.indexOf("="); + if (index != -1) { + argsProps.put(arg.substring(0, index).trim(), arg.substring(index + 1).trim()); + } + } + + return argsProps; + } + + public static boolean isValidExecId(String execId) { + boolean ret = false; + if (StringUtils.isNotBlank(execId)) { + ret = true; + } + return ret; + } + + public static String progressInPercentage(float progress) { + return String.valueOf(progress * 100) + "%"; + } + + public static void doSleepQuietly(Long sleepMills) { + try { + Thread.sleep(sleepMills); + } catch (Exception ignore) { + // ignored + } + } } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/LogUtils.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/utils/LoggerManager.java similarity index 93% rename from 
linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/LogUtils.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/utils/LoggerManager.java index b8147a4a60..bf302e9e85 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/LogUtils.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/utils/LoggerManager.java @@ -15,12 +15,12 @@ * limitations under the License. */ -package org.apache.linkis.cli.core.utils; +package org.apache.linkis.cli.application.utils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -public class LogUtils { +public class LoggerManager { private static final Logger PlainTextLogger = LoggerFactory.getLogger("PlaintTextLogger"); private static final Logger InformationLogger = LoggerFactory.getLogger("InformationLogger"); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/SchedulerUtils.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/utils/SchedulerManager.java similarity index 94% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/SchedulerUtils.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/utils/SchedulerManager.java index 5cfce48628..48aa367959 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/utils/SchedulerUtils.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/utils/SchedulerManager.java @@ -15,12 +15,12 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.utils; +package org.apache.linkis.cli.application.utils; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicInteger; -public class SchedulerUtils { +public class SchedulerManager { private static ExecutorService fixedThreadPool; private static ThreadPoolExecutor cachedThreadPool; private static int THREAD_NUM = 5; @@ -62,7 +62,7 @@ public static ExecutorService newFixedThreadPool( public static ThreadPoolExecutor getCachedThreadPoolExecutor() { if (cachedThreadPool == null) { - synchronized (SchedulerUtils.class) { + synchronized (SchedulerManager.class) { if (cachedThreadPool == null) { cachedThreadPool = newCachedThreadPool(THREAD_NUM, THREAD_NAME, IS_DEAMON); } @@ -73,7 +73,7 @@ public static ThreadPoolExecutor getCachedThreadPoolExecutor() { public static ExecutorService getFixedThreadPool() { if (fixedThreadPool == null) { - synchronized (SchedulerUtils.class) { + synchronized (SchedulerManager.class) { if (fixedThreadPool == null) { fixedThreadPool = newFixedThreadPool(THREAD_NUM, THREAD_NAME, IS_DEAMON); } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/resources/version.properties b/linkis-computation-governance/linkis-client/linkis-cli/src/main/resources/version.properties new file mode 100644 index 0000000000..0da37e5c03 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/resources/version.properties @@ -0,0 +1,16 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +cli.version=${project.version} \ No newline at end of file diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/LinkisClientApplicationTest.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/LinkisClientApplicationTest.java similarity index 88% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/LinkisClientApplicationTest.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/LinkisClientApplicationTest.java index 0af2226661..d7ca2c0349 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/LinkisClientApplicationTest.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/LinkisClientApplicationTest.java @@ -28,26 +28,36 @@ public class LinkisClientApplicationTest { String[] cmdStr; String[] cmdStr2; + String[] cmdStr3; @BeforeEach public void before() { System.setProperty("conf.root", "src/test/resources/conf/"); System.setProperty("user.name", "hadoop"); + cmdStr3 = new String[] {"--version"}; cmdStr2 = new String[] { - // "--gatewayUrl", "http://127.0.0.1:8090", - // "--authStg", "token", - // "--authKey", "Validation-Code", - // "--authVal", "BML-AUTH", - // "job", - // "kill", - // "-j", "1121", - // "-submitUser", "user", - 
// "-proxyUser", "user", - - // "-varMap", "name=\"tables\"", - // "-varMap", "name=\"databases\"" - + "--gatewayUrl", + "http://127.0.0.1:9001", + "--authStg", + "token", + "--authKey", + "Validation-Code", + "--authVal", + "BML-AUTH", + "--status", + // "--log", + // "--kill", + // "--result", + "5773107", + "-submitUser", + "hadoop", + "-proxyUser", + "hadoop", + "-varMap", + "name=\"tables\"", + "-varMap", + "name2=\"databases\"" }; cmdStr = new String[] { @@ -99,6 +109,7 @@ public void before() { "-codeType", "shell", "-code", + // "exit -1", "whoami", // "-engineType", "spark-2.4.3", @@ -165,7 +176,8 @@ public void testProcessInput() { /** Method: exec(ProcessedData data) */ @Test public void testExec() { - // LinkisClientApplication.main(cmdStr); + // LinkisClientApplication.main(cmdStr); + LinkisClientApplication.main(cmdStr3); // LinkisClientApplication.main(cmdStr2); /* try { diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/constants/AppConstantsTest.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/constants/CliConstantsTest.java similarity index 71% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/constants/AppConstantsTest.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/constants/CliConstantsTest.java index bb16d43e47..b576c89f4f 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/constants/AppConstantsTest.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/constants/CliConstantsTest.java @@ -21,25 +21,25 @@ import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; -public class 
AppConstantsTest { +public class CliConstantsTest { @Test @DisplayName("constTest") public void constTest() { - String defaultConfigName = AppConstants.DEFAULT_CONFIG_NAME; - Long jobQuerySleepMills = AppConstants.JOB_QUERY_SLEEP_MILLS; - String resultsetLogo = AppConstants.RESULTSET_LOGO; - String resultsetMetaBeginLogo = AppConstants.RESULTSET_META_BEGIN_LOGO; - String resultsetMetaEndLogo = AppConstants.RESULTSET_META_END_LOGO; - String resultsetSeparatorLogo = AppConstants.RESULTSET_SEPARATOR_LOGO; - Integer resultsetPageSize = AppConstants.RESULTSET_PAGE_SIZE; - String jobCreatorDefault = AppConstants.JOB_CREATOR_DEFAULT; - String jobCreatorAsyncDefault = AppConstants.JOB_CREATOR_ASYNC_DEFAULT; - String dummyCid = AppConstants.DUMMY_CID; - String linkisCli = AppConstants.LINKIS_CLI; - String ujesMode = AppConstants.UJES_MODE; - String onceMode = AppConstants.ONCE_MODE; + String defaultConfigName = CliConstants.DEFAULT_CONFIG_NAME; + Long jobQuerySleepMills = CliConstants.JOB_QUERY_SLEEP_MILLS; + String resultsetLogo = CliConstants.RESULTSET_LOGO; + String resultsetMetaBeginLogo = CliConstants.RESULTSET_META_BEGIN_LOGO; + String resultsetMetaEndLogo = CliConstants.RESULTSET_META_END_LOGO; + String resultsetSeparatorLogo = CliConstants.RESULTSET_SEPARATOR_LOGO; + Integer resultsetPageSize = CliConstants.RESULTSET_PAGE_SIZE; + String jobCreatorDefault = CliConstants.JOB_CREATOR_DEFAULT; + String jobCreatorAsyncDefault = CliConstants.JOB_CREATOR_ASYNC_DEFAULT; + String dummyCid = CliConstants.DUMMY_CID; + String linkisCli = CliConstants.LINKIS_CLI; + String ujesMode = CliConstants.UJES_MODE; + String onceMode = CliConstants.ONCE_MODE; Assertions.assertEquals("linkis-cli.properties", defaultConfigName); Assertions.assertTrue(2000L == jobQuerySleepMills.longValue()); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/constants/AppKeysTest.java 
b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/constants/CliKeysTest.java similarity index 73% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/constants/AppKeysTest.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/constants/CliKeysTest.java index f2477149b8..3fc7b84a0a 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/constants/AppKeysTest.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/constants/CliKeysTest.java @@ -21,27 +21,27 @@ import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; -public class AppKeysTest { +public class CliKeysTest { @Test @DisplayName("constTest") public void constTest() { - String adminUsers = AppKeys.ADMIN_USERS; - String linkisClientNoncustomizable = AppKeys.LINKIS_CLIENT_NONCUSTOMIZABLE; - String logPathKey = AppKeys.LOG_PATH_KEY; - String logFileKey = AppKeys.LOG_FILE_KEY; - String clientConfigRootKey = AppKeys.CLIENT_CONFIG_ROOT_KEY; - String defaultConfigFileNameKey = AppKeys.DEFAULT_CONFIG_FILE_NAME_KEY; - String linuxUserKey = AppKeys.LINUX_USER_KEY; - String jobExec = AppKeys.JOB_EXEC; - String jobExecCode = AppKeys.JOB_EXEC_CODE; - String jobContent = AppKeys.JOB_CONTENT; - String jobSource = AppKeys.JOB_SOURCE; - String jobParamConf = AppKeys.JOB_PARAM_CONF; - String jobParamRuntime = AppKeys.JOB_PARAM_RUNTIME; - String jobParamVar = AppKeys.JOB_PARAM_VAR; - String jobLabel = AppKeys.JOB_LABEL; + String adminUsers = CliKeys.ADMIN_USERS; + String linkisClientNoncustomizable = CliKeys.LINKIS_CLIENT_NONCUSTOMIZABLE; + String logPathKey = CliKeys.LOG_PATH_KEY; + String logFileKey = CliKeys.LOG_FILE_KEY; + String clientConfigRootKey = 
CliKeys.CLIENT_CONFIG_ROOT_KEY; + String defaultConfigFileNameKey = CliKeys.DEFAULT_CONFIG_FILE_NAME_KEY; + String linuxUserKey = CliKeys.LINUX_USER_KEY; + String jobExec = CliKeys.JOB_EXEC; + String jobExecCode = CliKeys.JOB_EXEC_CODE; + String jobContent = CliKeys.JOB_CONTENT; + String jobSource = CliKeys.JOB_SOURCE; + String jobParamConf = CliKeys.JOB_PARAM_CONF; + String jobParamRuntime = CliKeys.JOB_PARAM_RUNTIME; + String jobParamVar = CliKeys.JOB_PARAM_VAR; + String jobLabel = CliKeys.JOB_LABEL; Assertions.assertEquals("hadoop,root,shangda", adminUsers); Assertions.assertEquals("wds.linkis.client.noncustomizable", linkisClientNoncustomizable); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/constants/LinkisConstantsTest.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/constants/LinkisConstantsTest.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/constants/LinkisConstantsTest.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/constants/LinkisConstantsTest.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/constants/LinkisKeysTest.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/constants/LinkisKeysTest.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/constants/LinkisKeysTest.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/constants/LinkisKeysTest.java diff --git 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/constants/TestConstants.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/constants/TestConstants.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/constants/TestConstants.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/constants/TestConstants.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/LinkisCmdTypeTest.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/command/CliCmdTypeTest.java similarity index 86% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/LinkisCmdTypeTest.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/command/CliCmdTypeTest.java index 16f47274d4..fecda11db7 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/LinkisCmdTypeTest.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/command/CliCmdTypeTest.java @@ -21,15 +21,15 @@ import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; -public class LinkisCmdTypeTest { +public class CliCmdTypeTest { @Test @DisplayName("enumTest") public void enumTest() { - int id = LinkisCmdType.UNIVERSAL.getId(); - String name = LinkisCmdType.UNIVERSAL.getName(); - String desc = 
LinkisCmdType.UNIVERSAL.getDesc(); + int id = CliCmdType.UNIVERSAL.getId(); + String name = CliCmdType.UNIVERSAL.getName(); + String desc = CliCmdType.UNIVERSAL.getDesc(); Assertions.assertTrue(1 == id); Assertions.assertEquals("linkis-cli", name); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestCmdType.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestCmdType.java similarity index 96% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestCmdType.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestCmdType.java index 54a5952f09..0155ee5029 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestCmdType.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestCmdType.java @@ -18,7 +18,7 @@ package org.apache.linkis.cli.application.interactor.command.template; import org.apache.linkis.cli.application.constants.TestConstants; -import org.apache.linkis.cli.common.entity.command.CmdType; +import org.apache.linkis.cli.application.entity.command.CmdType; public enum TestCmdType implements CmdType { TEST_PRIMARY(TestConstants.PRIMARY_COMMAND, 1, TestConstants.SPARK_DESC), diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestParamMapper.java 
b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestParamMapper.java similarity index 78% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestParamMapper.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestParamMapper.java index 1a8959f097..a077cfcca7 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestParamMapper.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestParamMapper.java @@ -17,19 +17,19 @@ package org.apache.linkis.cli.application.interactor.command.template; -import org.apache.linkis.cli.application.constants.AppKeys; +import org.apache.linkis.cli.application.constants.CliKeys; import org.apache.linkis.cli.application.constants.TestConstants; -import org.apache.linkis.cli.core.interactor.command.parser.transformer.ParamKeyMapper; +import org.apache.linkis.cli.application.interactor.command.parser.transformer.ParamKeyMapper; public class TestParamMapper extends ParamKeyMapper { @Override public void initMapperRules() { - super.updateMapping(TestConstants.PARAM_COMMON_CMD, AppKeys.JOB_EXEC_CODE); - super.updateMapping(TestConstants.PARAM_PROXY, AppKeys.LINKIS_COMMON_GATEWAY_URL); - super.updateMapping(TestConstants.PARAM_USER, AppKeys.LINKIS_COMMON_TOKEN_KEY); - super.updateMapping(TestConstants.PARAM_USR_CONF, AppKeys.LINKIS_CLIENT_USER_CONFIG); - super.updateMapping(TestConstants.PARAM_PASSWORD, AppKeys.LINKIS_COMMON_TOKEN_VALUE); - super.updateMapping(TestConstants.PARAM_PROXY_USER, AppKeys.JOB_COMMON_PROXY_USER); + 
super.updateMapping(TestConstants.PARAM_COMMON_CMD, CliKeys.JOB_EXEC_CODE); + super.updateMapping(TestConstants.PARAM_PROXY, CliKeys.LINKIS_COMMON_GATEWAY_URL); + super.updateMapping(TestConstants.PARAM_USER, CliKeys.LINKIS_COMMON_TOKEN_KEY); + super.updateMapping(TestConstants.PARAM_USR_CONF, CliKeys.LINKIS_CLIENT_USER_CONFIG); + super.updateMapping(TestConstants.PARAM_PASSWORD, CliKeys.LINKIS_COMMON_TOKEN_VALUE); + super.updateMapping(TestConstants.PARAM_PROXY_USER, CliKeys.JOB_COMMON_PROXY_USER); updateMapping( TestConstants.PARAM_SPARK_EXECUTOR_CORES, TestConstants.LINKIS_SPARK_EXECUTOR_CORES); @@ -43,7 +43,7 @@ public void initMapperRules() { updateMapping( TestConstants.PARAM_SPARK_SHUFFLE_PARTITIONS, TestConstants.LINKIS_SPARK_SHUFFLE_PARTITIONS); - updateMapping(TestConstants.PARAM_SPARK_RUNTYPE, AppKeys.JOB_LABEL_CODE_TYPE); + updateMapping(TestConstants.PARAM_SPARK_RUNTYPE, CliKeys.JOB_LABEL_CODE_TYPE); updateMapping(TestConstants.PARAM_YARN_QUEUE, TestConstants.YARN_QUEUE); } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestSparkCmdTemplate.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestSparkCmdTemplate.java similarity index 94% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestSparkCmdTemplate.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestSparkCmdTemplate.java index c9274fbf71..fb53803b6a 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestSparkCmdTemplate.java +++ 
b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/command/template/TestSparkCmdTemplate.java @@ -17,11 +17,10 @@ package org.apache.linkis.cli.application.interactor.command.template; -import org.apache.linkis.cli.application.constants.AppKeys; +import org.apache.linkis.cli.application.constants.CliKeys; import org.apache.linkis.cli.application.constants.TestConstants; -import org.apache.linkis.cli.core.exception.CommandException; -import org.apache.linkis.cli.core.interactor.command.template.AbstractCmdTemplate; -import org.apache.linkis.cli.core.interactor.command.template.option.StdOption; +import org.apache.linkis.cli.application.exception.CommandException; +import org.apache.linkis.cli.application.interactor.command.template.option.StdOption; import java.util.HashMap; import java.util.Map; @@ -50,7 +49,7 @@ public class TestSparkCmdTemplate extends AbstractCmdTemplate { protected StdOption userOp = option( null, - AppKeys.JOB_COMMON_SUBMIT_USER, + CliKeys.JOB_COMMON_SUBMIT_USER, new String[] {"-u", "--user"}, "specify user", true, @@ -67,7 +66,7 @@ public class TestSparkCmdTemplate extends AbstractCmdTemplate { private StdOption passwordOp = option( null, - AppKeys.JOB_COMMON_SUBMIT_PASSWORD, + CliKeys.JOB_COMMON_SUBMIT_PASSWORD, new String[] {"-pwd", "--passwd"}, "specify user password", true, @@ -95,7 +94,7 @@ public class TestSparkCmdTemplate extends AbstractCmdTemplate { private StdOption> confMap = option( null, - AppKeys.JOB_PARAM_CONF, + CliKeys.JOB_PARAM_CONF, new String[] {"-confMap"}, "confMap", true, diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/LinkisJobStatusTest.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/job/LinkisJobStatusTest.java similarity index 95% rename from 
linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/LinkisJobStatusTest.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/job/LinkisJobStatusTest.java index 1b3b7c67ec..ff71dd0c43 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/LinkisJobStatusTest.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/job/LinkisJobStatusTest.java @@ -17,6 +17,8 @@ package org.apache.linkis.cli.application.interactor.job; +import org.apache.linkis.cli.application.interactor.job.common.LinkisJobStatus; + import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/builder/ProcessKeyUtilsTest.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/job/builder/KeyParserTest.java similarity index 81% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/builder/ProcessKeyUtilsTest.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/job/builder/KeyParserTest.java index cc579bec17..4c6c494c20 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/builder/ProcessKeyUtilsTest.java +++ 
b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/job/builder/KeyParserTest.java @@ -17,7 +17,8 @@ package org.apache.linkis.cli.application.interactor.job.builder; -import org.apache.linkis.cli.application.constants.AppKeys; +import org.apache.linkis.cli.application.constants.CliKeys; +import org.apache.linkis.cli.application.interactor.job.common.KeyParser; import java.util.HashMap; import java.util.Map; @@ -26,17 +27,17 @@ import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; -public class ProcessKeyUtilsTest { +public class KeyParserTest { @Test @DisplayName("removePrefixForKeysInMapTest") public void removePrefixForKeysInMapTest() { Map map = new HashMap<>(); - map.put(AppKeys.JOB_PARAM_CONF, new Object()); + map.put(CliKeys.JOB_PARAM_CONF, new Object()); map.put("name", new Object()); - Map stringObjectMap = ProcessKeyUtils.removePrefixForKeysInMap(map); + Map stringObjectMap = KeyParser.removePrefixForKeysInMap(map); Assertions.assertTrue(1 == stringObjectMap.size()); } } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/subtype/LinkisManSubTypeTest.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/job/subtype/JobCmdSubTypeTest.java similarity index 75% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/subtype/LinkisManSubTypeTest.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/job/subtype/JobCmdSubTypeTest.java index 699eaaf8b0..c720e7f2c2 100644 --- 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/interactor/job/subtype/LinkisManSubTypeTest.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/interactor/job/subtype/JobCmdSubTypeTest.java @@ -17,22 +17,24 @@ package org.apache.linkis.cli.application.interactor.job.subtype; +import org.apache.linkis.cli.application.interactor.job.jobcmd.JobCmdSubType; + import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Test; -public class LinkisManSubTypeTest { +public class JobCmdSubTypeTest { @Test @DisplayName("enumTest") public void enumTest() { - String killName = LinkisManSubType.KILL.getName(); - String logName = LinkisManSubType.LOG.getName(); - String descName = LinkisManSubType.DESC.getName(); - String statusName = LinkisManSubType.STATUS.getName(); - String listName = LinkisManSubType.LIST.getName(); - String resultName = LinkisManSubType.RESULT.getName(); + String killName = JobCmdSubType.KILL.getName(); + String logName = JobCmdSubType.LOG.getName(); + String descName = JobCmdSubType.DESC.getName(); + String statusName = JobCmdSubType.STATUS.getName(); + String listName = JobCmdSubType.LIST.getName(); + String resultName = JobCmdSubType.RESULT.getName(); Assertions.assertEquals("kill", killName); Assertions.assertEquals("log", logName); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/operator/once/OnceJobConstantsTest.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/operator/once/OnceJobConstantsTest.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/operator/once/OnceJobConstantsTest.java rename to 
linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/operator/once/OnceJobConstantsTest.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/operator/ujes/UJESConstantsTest.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/operator/ujes/UJESConstantsTest.java similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/operator/ujes/UJESConstantsTest.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/operator/ujes/UJESConstantsTest.java diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/utils/UtilsTest.java b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/utils/UtilsTest.java similarity index 95% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/utils/UtilsTest.java rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/utils/UtilsTest.java index 5af0e6ab85..8e07a5a6bb 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/utils/UtilsTest.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/test/java/org/apache/linkis/cli/application/utils/UtilsTest.java @@ -28,7 +28,7 @@ public class UtilsTest { public void isValidExecIdTest() { String execId = "0001"; - boolean validExecId = Utils.isValidExecId(execId); + boolean validExecId = CliUtils.isValidExecId(execId); Assertions.assertTrue(validExecId); } } diff --git 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/resources/conf/linkis-cli.properties b/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/conf/linkis-cli.properties similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/resources/conf/linkis-cli.properties rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/conf/linkis-cli.properties diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/resources/conf/user.properties b/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/conf/user.properties similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/resources/conf/user.properties rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/conf/user.properties diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/resources/linkis-cli.properties b/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/linkis-cli.properties similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/resources/linkis-cli.properties rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/linkis-cli.properties diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/resources/log4j2.xml b/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/log4j2.xml similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/resources/log4j2.xml rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/log4j2.xml diff --git 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/resources/testScala.scala b/linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/testScala.scala similarity index 100% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/resources/testScala.scala rename to linkis-computation-governance/linkis-client/linkis-cli/src/test/resources/testScala.scala diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/LinkisManagerClient.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/LinkisManagerClient.scala index 3ac3cb7c88..45f3f49bed 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/LinkisManagerClient.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/LinkisManagerClient.scala @@ -17,7 +17,9 @@ package org.apache.linkis.computation.client.once +import org.apache.linkis.common.utils.Utils import org.apache.linkis.computation.client.once.action.{ + AskEngineConnAction, CreateEngineConnAction, EngineConnOperateAction, GetEngineConnAction, @@ -25,6 +27,7 @@ import org.apache.linkis.computation.client.once.action.{ LinkisManagerAction } import org.apache.linkis.computation.client.once.result.{ + AskEngineConnResult, CreateEngineConnResult, EngineConnOperateResult, GetEngineConnResult, @@ -39,6 +42,8 @@ import java.io.Closeable trait LinkisManagerClient extends Closeable { + def askEngineConn(askEngineConnAction: AskEngineConnAction): AskEngineConnResult + def createEngineConn(createEngineConnAction: CreateEngineConnAction): CreateEngineConnResult def getEngineConn(getEngineConnAction: GetEngineConnAction): GetEngineConnResult @@ -82,7 
+87,21 @@ class LinkisManagerClientImpl(ujesClient: UJESClient) extends LinkisManagerClien override def executeEngineConnOperation( engineConnOperateAction: EngineConnOperateAction - ): EngineConnOperateResult = execute(engineConnOperateAction) + ): EngineConnOperateResult = { + Utils.tryCatch { + val rs = execute[EngineConnOperateResult](engineConnOperateAction) + rs + } { case e: Exception => + val rs = new EngineConnOperateResult + rs.setIsError(true) + rs.setErrorMsg(e.getMessage) + rs + } + } override def close(): Unit = ujesClient.close() + + override def askEngineConn(askEngineConnAction: AskEngineConnAction): AskEngineConnResult = + execute(askEngineConnAction) + } diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/action/AskEngineConnAction.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/action/AskEngineConnAction.scala new file mode 100644 index 0000000000..4b89b53764 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/action/AskEngineConnAction.scala @@ -0,0 +1,101 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.computation.client.once.action + +import org.apache.linkis.httpclient.dws.DWSHttpClient +import org.apache.linkis.httpclient.request.POSTAction +import org.apache.linkis.ujes.client.exception.UJESJobException + +import org.apache.commons.lang3.StringUtils + +import java.util + +class AskEngineConnAction extends POSTAction with LinkisManagerAction { + + override def getRequestPayload: String = + DWSHttpClient.jacksonJson.writeValueAsString(getRequestPayloads) + + override def suffixURLs: Array[String] = Array("linkisManager", "askEngineConn") + +} + +object AskEngineConnAction { + + def newBuilder(): Builder = new Builder + + class Builder private[AskEngineConnAction] () { + private var user: String = _ + private var properties: util.Map[String, String] = _ + private var labels: util.Map[String, String] = _ + private var maxSubmitTime: Long = _ + private var createService: String = _ + private var description: String = _ + + def setUser(user: String): Builder = { + this.user = user + this + } + + def setProperties(properties: util.Map[String, String]): Builder = { + this.properties = properties + this + } + + def setLabels(labels: java.util.Map[String, String]): Builder = { + this.labels = labels + this + } + + def setMaxSubmitTime(maxSubmitTime: Long): Builder = { + this.maxSubmitTime = maxSubmitTime + this + } + + def setCreateService(createService: String): Builder = { + this.createService = createService + this + } + + def setDescription(description: String): Builder = { + this.description = description + this + } + + def build(): AskEngineConnAction = { + val action = new AskEngineConnAction() + if (user == null) throw new UJESJobException("user is needed!") + if (properties == null) properties = new java.util.HashMap[String, String] + if (labels == null) throw new UJESJobException("labels is needed!") + 
action.setUser(user) + action.addRequestPayload("properties", properties) + action.addRequestPayload("labels", labels) + if (StringUtils.isNotBlank(createService)) { + action.addRequestPayload("createService", createService) + } + if (null != maxSubmitTime) { + action.addRequestPayload("timeOut", maxSubmitTime) + } + if (StringUtils.isNotBlank(description)) { + action.addRequestPayload("description", description) + } + action + } + + } + +} diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/AskEngineConnResult.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/AskEngineConnResult.scala new file mode 100644 index 0000000000..58c6085b45 --- /dev/null +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/AskEngineConnResult.scala @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.computation.client.once.result + +import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult + +@DWSHttpMessageResult("/api/rest_j/v\\d+/linkisManager/askEngineConn") +class AskEngineConnResult extends GetEngineConnResult diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/EngineConnOperateResult.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/EngineConnOperateResult.scala index 1bf12e0418..50df73bd10 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/EngineConnOperateResult.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/result/EngineConnOperateResult.scala @@ -33,9 +33,11 @@ class EngineConnOperateResult extends LinkisManagerResult { this.result = result } + def getErrorMsg(): String = errorMsg + def setErrorMsg(errorMsg: String): Unit = this.errorMsg = errorMsg - def setError(isError: Boolean): Unit = this.isError = isError + def getIsError(): Boolean = isError def setIsError(isError: Boolean): Unit = this.isError = isError diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/simple/SimpleOnceJob.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/simple/SimpleOnceJob.scala index baab361b58..e672e7bfe1 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/simple/SimpleOnceJob.scala +++ 
b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/once/simple/SimpleOnceJob.scala @@ -94,6 +94,10 @@ trait SimpleOnceJob extends OnceJob { case operator => operator } + def getEcServiceInstance: ServiceInstance = serviceInstance + + def getEcTicketId: String = ticketId + } class SubmittableSimpleOnceJob( diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/operator/impl/EngineConnApplicationInfoOperator.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/operator/impl/EngineConnApplicationInfoOperator.scala index 83399bf371..a1dba63404 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/operator/impl/EngineConnApplicationInfoOperator.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/computation/client/operator/impl/EngineConnApplicationInfoOperator.scala @@ -19,6 +19,7 @@ package org.apache.linkis.computation.client.operator.impl import org.apache.linkis.computation.client.once.result.EngineConnOperateResult import org.apache.linkis.computation.client.operator.OnceJobOperator +import org.apache.linkis.governance.common.constant.ec.ECConstants import org.apache.linkis.ujes.client.exception.UJESJobException class EngineConnApplicationInfoOperator extends OnceJobOperator[ApplicationInfo] { @@ -28,7 +29,7 @@ class EngineConnApplicationInfoOperator extends OnceJobOperator[ApplicationInfo] override protected def resultToObject(result: EngineConnOperateResult): ApplicationInfo = { ApplicationInfo( result - .getAsOption("applicationId") + .getAsOption(ECConstants.YARN_APPID_NAME_KEY) .getOrElse( throw new UJESJobException( 20300, @@ -36,14 +37,14 @@ class EngineConnApplicationInfoOperator extends 
OnceJobOperator[ApplicationInfo] ) ), result - .getAsOption("applicationUrl") + .getAsOption(ECConstants.YARN_APP_URL_KEY) .getOrElse( throw new UJESJobException( 20300, s"Cannot get applicationUrl from EngineConn $getServiceInstance." ) ), - result.getAs("queue") + result.getAs(ECConstants.QUEUE) ) } diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/ResultSetAction.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/ResultSetAction.scala index 6b41b4c62b..9eb748691e 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/ResultSetAction.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/request/ResultSetAction.scala @@ -35,6 +35,9 @@ object ResultSetAction { private var pageSize: Int = _ private var charset: String = Configuration.BDP_ENCODING.getValue + // default value is :org.apache.linkis.storage.domain.Dolphin.LINKIS_NULL + private var nullValue: String = "LINKIS_NULL" + def setUser(user: String): Builder = { this.user = user this @@ -60,6 +63,11 @@ object ResultSetAction { this } + def setNullValue(nullValue: String): Builder = { + this.nullValue = nullValue + this + } + def build(): ResultSetAction = { if (user == null) throw new UJESClientBuilderException("user is needed!") if (path == null) throw new UJESClientBuilderException("path is needed!") @@ -68,6 +76,7 @@ object ResultSetAction { if (page > 0) resultSetAction.setParameter("page", page) if (pageSize > 0) resultSetAction.setParameter("pageSize", pageSize) resultSetAction.setParameter("charset", charset) + resultSetAction.setParameter("nullValue", nullValue) resultSetAction.setUser(user) resultSetAction } diff --git 
a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/OpenLogResult.scala b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/OpenLogResult.scala index fe107a32ca..2de5758aea 100644 --- a/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/OpenLogResult.scala +++ b/linkis-computation-governance/linkis-client/linkis-computation-client/src/main/scala/org/apache/linkis/ujes/client/response/OpenLogResult.scala @@ -26,7 +26,7 @@ import scala.beans.BeanProperty class OpenLogResult extends DWSResult { /** - * log[0] - info log[1] - warn log[2] - error log[3] - all (info + warn + error) + * log[0] - error log[1] - warn log[2] - info log[3] - all (info + warn + error) */ @BeanProperty var log: Array[String] = _ diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/entity/TemplateConfKey.java b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/entity/TemplateConfKey.java new file mode 100644 index 0000000000..13cbac5577 --- /dev/null +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/entity/TemplateConfKey.java @@ -0,0 +1,79 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.governance.common.entity; + +public class TemplateConfKey { + + private String templateUuid; + + private String key; + + private String templateName; + + private String configValue; + + public String getTemplateUuid() { + return templateUuid; + } + + public void setTemplateUuid(String templateUuid) { + this.templateUuid = templateUuid; + } + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public String getTemplateName() { + return templateName; + } + + public void setTemplateName(String templateName) { + this.templateName = templateName; + } + + public String getConfigValue() { + return configValue; + } + + public void setConfigValue(String configValue) { + this.configValue = configValue; + } + + @Override + public String toString() { + return "TemplateKey{" + + "templateUuid='" + + templateUuid + + '\'' + + ", key='" + + key + + '\'' + + ", templateName='" + + templateName + + '\'' + + ", configValue='" + + configValue + + '\'' + + '}'; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/subtype/LinkisSubmitSubType.java b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/enums/OnceJobOperationBoundary.java similarity index 78% rename from 
linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/subtype/LinkisSubmitSubType.java rename to linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/enums/OnceJobOperationBoundary.java index 49d3d5ea91..37c6fc8d92 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/subtype/LinkisSubmitSubType.java +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/enums/OnceJobOperationBoundary.java @@ -15,20 +15,18 @@ * limitations under the License. */ -package org.apache.linkis.cli.application.interactor.job.subtype; +package org.apache.linkis.governance.common.enums; -import org.apache.linkis.cli.common.entity.job.JobSubType; - -public enum LinkisSubmitSubType implements JobSubType { - SUBMIT("submit"); +public enum OnceJobOperationBoundary { + ECM("ecm"), + EC("ec"); private String name; - LinkisSubmitSubType(String name) { + OnceJobOperationBoundary(String name) { this.name = name; } - @Override public String getName() { return name; } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/exception/engineconn/EngineConnExecutorErrorCode.java b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/exception/engineconn/EngineConnExecutorErrorCode.java index c0d295755a..89d3c9eba4 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/exception/engineconn/EngineConnExecutorErrorCode.java +++ 
b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/exception/engineconn/EngineConnExecutorErrorCode.java @@ -37,4 +37,6 @@ public class EngineConnExecutorErrorCode { public static final int SEND_TO_ENTRANCE_ERROR = 40105; public static final int INIT_EXECUTOR_FAILED = 40106; + + public static final int INVALID_APPLICATION_ID = 40107; } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfRequest.java b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfRequest.java new file mode 100644 index 0000000000..258f724e82 --- /dev/null +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfRequest.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.governance.common.protocol.conf; + +import org.apache.linkis.protocol.message.RequestProtocol; + +public class TemplateConfRequest implements RequestProtocol { + + private String templateUuid; + + public TemplateConfRequest(String templateUuid) { + this.templateUuid = templateUuid; + } + + public String getTemplateUuid() { + return templateUuid; + } + + public void setTemplateUuid(String templateUuid) { + this.templateUuid = templateUuid; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisResultData.java b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfResponse.java similarity index 67% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisResultData.java rename to linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfResponse.java index f8e61a790d..8822fe988d 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisResultData.java +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfResponse.java @@ -15,25 +15,22 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.application.interactor.job.data; +package org.apache.linkis.governance.common.protocol.conf; -import java.util.List; - -public interface LinkisResultData extends LinkisJobData, Cloneable { - - String getResultLocation(); +import org.apache.linkis.governance.common.entity.TemplateConfKey; - String[] getResultSetPaths(); - - Boolean hasNextResultPage(); - - List consumeResultContent(); +import java.util.ArrayList; +import java.util.List; - void sendResultFin(); +public class TemplateConfResponse { - boolean resultFinReceived(); + private List list = new ArrayList<>(); - boolean hasResult(); + public List getList() { + return list; + } - void setHasResult(boolean hasResult); + public void setList(List list) { + this.list = list; + } } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/utils/LoggerUtils.java b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/utils/LoggerUtils.java new file mode 100644 index 0000000000..99addd4fed --- /dev/null +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/utils/LoggerUtils.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.governance.common.utils; + +import org.apache.linkis.governance.common.conf.GovernanceCommonConf; +import org.apache.linkis.governance.common.constant.job.JobRequestConstants; + +import java.util.Map; + +import org.slf4j.MDC; + +public class LoggerUtils { + + public static void setJobIdMDC(String jobId) { + MDC.put(JobRequestConstants.JOB_ID(), jobId); + } + + public static void setJobIdMDC(Map props) { + if (GovernanceCommonConf.MDC_ENABLED()) { + String jobId = JobUtils.getJobIdFromMap(props); + MDC.put(JobRequestConstants.JOB_ID(), jobId); + } + } + + public static void removeJobIdMDC() { + MDC.remove(JobRequestConstants.JOB_ID()); + } +} diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernaceCommonConf.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernaceCommonConf.scala index 48f906bb8c..0d9b62ff48 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernaceCommonConf.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernaceCommonConf.scala @@ -18,6 +18,7 @@ package org.apache.linkis.governance.common.conf import org.apache.linkis.common.conf.{CommonVars, Configuration} +import org.apache.linkis.governance.common.constant.ec.ECConstants object GovernanceCommonConf { @@ -40,10 +41,16 @@ object GovernanceCommonConf { val ENGINE_CONN_MANAGER_SPRING_NAME = CommonVars("wds.linkis.engineconn.manager.name", "linkis-cg-engineconnmanager") + val ENGINE_APPLICATION_MANAGER_SPRING_NAME = + CommonVars("wds.linkis.application.manager.name", "linkis-cg-linkismanager") + val 
ENGINE_CONN_PORT_RANGE = CommonVars("wds.linkis.engineconn.port.range", "-") val MANAGER_SERVICE_NAME = - CommonVars("wds.linkis.engineconn.manager.name", "linkis-cg-linkismanager") + CommonVars( + "wds.linkis.engineconn.manager.name", + GovernanceCommonConf.ENGINE_APPLICATION_MANAGER_SPRING_NAME.getValue + ) val ENTRANCE_SERVICE_NAME = CommonVars("wds.linkis.entrance.name", "linkis-cg-entrance") @@ -66,8 +73,17 @@ object GovernanceCommonConf { val ERROR_CODE_DESC_LEN = CommonVars("linkis.error.code.desc.len", 512, "Error code description maximum length").getValue + val FAKE_PROGRESS: Float = CommonVars[Float]("linkis.job.fake.progress", 0.99f).getValue + + val MDC_ENABLED = + CommonVars("linkis.mdc.log.enabled", true, "MDC Switch").getValue + def getEngineEnvValue(envKey: String): String = { CommonVars(envKey, "").getValue } + // value ECConstants.EC_CLIENT_TYPE_ATTACH + val EC_APP_MANAGE_MODE = + CommonVars("linkis.ec.app.manage.mode", "attach") + } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/constant/ec/ECConstants.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/constant/ec/ECConstants.scala index fe48f6887d..a94eadf422 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/constant/ec/ECConstants.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/constant/ec/ECConstants.scala @@ -35,4 +35,43 @@ object ECConstants { val YARN_QUEUE_NAME_CONFIG_KEY = "wds.linkis.rm.yarnqueue" + val QUEUE = "queue" + + val EC_CLIENT_TYPE_ATTACH = "attach" + + val EC_CLIENT_TYPE_DETACH = "detach" + + val YARN_APPID_NAME_KEY = "applicationId" + + val YARN_APP_URL_KEY = "applicationUrl" + + val YARN_APP_NAME_KEY = "appicationName" + + val YARN_MODE_KEY = "yarnMode" + + 
val EC_SERVICE_INSTANCE_KEY = "serviceInstance" + + val ECM_SERVICE_INSTANCE_KEY = "ecmServiceInstance" + + val MANAGER_SERVICE_INSTANCE_KEY = "managerServiceInstance" + + val NODE_STATUS_KEY = "nodeStatus" + + val EC_LAST_UNLOCK_TIMESTAMP = "lastUnlockTimestamp" + + val YARN_APP_TYPE_LIST_KEY = "yarnAppTypeList" + + val YARN_APP_STATE_LIST_KEY = "yarnAppStateList" + + val YARN_APP_TYPE_KEY = "yarnAppType" + + val YARN_APP_TYPE_SPARK = "spark" + + val YARN_APP_TYPE_FLINK = "flink" + + val EC_OPERATE_LIST = "list" + + val EC_OPERATE_STATUS = "status" + + val YARN_APP_RESULT_LIST_KEY = "yarnAppResultList" } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/paser/CodeParser.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/paser/CodeParser.scala index 19bd7f9cdb..64ece62fd7 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/paser/CodeParser.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/paser/CodeParser.scala @@ -86,6 +86,11 @@ abstract class CombinedEngineCodeParser extends CodeParser { } +/** + * Scala is no longer using Parser but instead using EmptyParser. 
If there is a comment at the end, + * it will cause the task to become stuck + */ +@deprecated class ScalaCodeParser extends SingleCodeParser with Logging { override val codeType: CodeType = CodeType.Scala @@ -109,6 +114,9 @@ class ScalaCodeParser extends SingleCodeParser with Logging { case _ => } if (statementBuffer.nonEmpty) codeBuffer.append(statementBuffer.mkString("\n")) + // Make sure the last line is not a comment + codeBuffer.append("\n") + codeBuffer.append("val linkisVar=123") codeBuffer.toArray } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/ECPathUtils.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/ECPathUtils.scala index 58c08b1f84..236046f3d4 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/ECPathUtils.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/ECPathUtils.scala @@ -17,6 +17,8 @@ package org.apache.linkis.governance.common.utils +import org.apache.linkis.manager.label.entity.engine.{EngineTypeLabel, UserCreatorLabel} + import org.apache.commons.lang3.StringUtils import org.apache.commons.lang3.time.DateFormatUtils @@ -45,4 +47,20 @@ object ECPathUtils { suffix + File.separator + ticketId } + def getECLogDirSuffix( + engineTypeLabel: EngineTypeLabel, + userCreatorLabel: UserCreatorLabel, + ticketId: String + ): String = { + if (null == engineTypeLabel || null == userCreatorLabel) { + return "" + } + val suffix = ECPathUtils.getECWOrkDirPathSuffix( + userCreatorLabel.getUser, + ticketId, + engineTypeLabel.getEngineType + ) + suffix + File.separator + "logs" + } + } diff --git 
a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceConstant.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceConstant.scala index 52e7802164..54927a84df 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceConstant.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceConstant.scala @@ -31,5 +31,4 @@ object GovernanceConstant { val REQUEST_ENGINE_STATUS_BATCH_LIMIT = 500 - def RESULTSET_INDEX: String = "resultsetIndex" } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceUtils.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceUtils.scala index 43fd598f71..ddcb17a3b2 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceUtils.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/utils/GovernanceUtils.scala @@ -25,6 +25,7 @@ import org.apache.commons.lang3.StringUtils import java.io.File import java.util +import java.util.{ArrayList, List} object GovernanceUtils extends Logging { @@ -72,4 +73,52 @@ object GovernanceUtils extends Logging { } } + def killECProcessByPort(port: String, desc: String, isSudo: Boolean): Unit = { + val subProcessKillScriptPath = + Configuration.getLinkisHome() + "/sbin/kill-ec-process-by-port.sh" + if ( + StringUtils.isBlank(subProcessKillScriptPath) || !new File(subProcessKillScriptPath) + .exists() + ) { + logger.error(s"Failed to 
locate kill-script, $subProcessKillScriptPath not exist") + } else if (StringUtils.isNotBlank(port)) { + val cmd = if (isSudo) { + Array("sudo", "sh", subProcessKillScriptPath, port) + } else { + Array("sh", subProcessKillScriptPath, port) + } + logger.info( + s"Starting to kill process and sub-processes. desc: $desc Kill Command: " + cmd + .mkString(" ") + ) + + Utils.tryCatch { + val output = Utils.exec(cmd, 600 * 1000L) + logger.info(s"Kill Success! desc: $desc. msg:\n ${output}") + } { t => + logger.error(s"Kill error! desc: $desc.", t) + } + } + } + + /** + * find process id by port number + * @param processPort + * @return + */ + def findProcessIdentifier(processPort: String): String = { + val findCmd = + "sudo netstat -tunlp | grep :" + processPort + " | awk '{print $7}' | awk -F/ '{print $1}'" + val cmdList = new util.ArrayList[String] + cmdList.add("bash") + cmdList.add("-c") + cmdList.add(findCmd) + try Utils.exec(cmdList.toArray(new Array[String](0)), 5000L) + catch { + case e: Exception => + logger.warn("Method findPid failed, " + e.getMessage) + null + } + } + } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/paser/ScalaCodeParserTest.scala b/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/paser/ScalaCodeParserTest.scala index 3cfe787f61..04adf3446c 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/paser/ScalaCodeParserTest.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/paser/ScalaCodeParserTest.scala @@ -29,7 +29,7 @@ class ScalaCodeParserTest { "val codeBuffer = new ArrayBuffer[String]()\n val statementBuffer = new ArrayBuffer[String]()" val scalaCodeParser = new ScalaCodeParser val array = scalaCodeParser.parse(scalaCode) - 
Assertions.assertTrue(array.size == 1) + Assertions.assertTrue(array.size == 3) } @@ -41,7 +41,7 @@ class ScalaCodeParserTest { " def addInt( a:Int, b:Int )\n var sum:Int = 0\n sum = a + b\n return sum\n }" val scalaCodeParser = new ScalaCodeParser val array = scalaCodeParser.parse(abnormalCode) - Assertions.assertTrue(array.length == 1) + Assertions.assertTrue(array.length == 3) } @@ -54,7 +54,7 @@ class ScalaCodeParserTest { val scalaCodeParser = new ScalaCodeParser val array = scalaCodeParser.parse(importCode) - Assertions.assertTrue(array.length == 2) + Assertions.assertTrue(array.length == 4) } @@ -68,7 +68,7 @@ class ScalaCodeParserTest { val scalaCodeParser = new ScalaCodeParser val arrayResult1 = scalaCodeParser.parse(specialCodeExp1) - Assertions.assertTrue(arrayResult1.length == 2) + Assertions.assertTrue(arrayResult1.length == 4) val specialCodeExp2 = " @BeanProperty\n var id: Long = _\n @BeanProperty\n var status: Int = 0\n " + @@ -79,7 +79,7 @@ class ScalaCodeParserTest { ".append(data, that.data)\n .isEquals\n }" val arrayResult2 = scalaCodeParser.parse(specialCodeExp2) - Assertions.assertTrue(arrayResult2.length == 1) + Assertions.assertTrue(arrayResult2.length == 3) } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/utils/GovernanceConstantTest.scala b/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/utils/GovernanceConstantTest.scala index 891d43c8b7..22f3cee233 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/utils/GovernanceConstantTest.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/utils/GovernanceConstantTest.scala @@ -31,7 +31,6 @@ class GovernanceConstantTest { val taskresourceversionstr = 
GovernanceConstant.TASK_RESOURCE_VERSION_STR val taskresourcefilenamestr = GovernanceConstant.TASK_RESOURCE_FILE_NAME_STR val requestenginestatusbatchlimit = GovernanceConstant.REQUEST_ENGINE_STATUS_BATCH_LIMIT - val resultsetindex = GovernanceConstant.RESULTSET_INDEX Assertions.assertEquals("source", tasksourcemapkey) Assertions.assertEquals("resources", taskresourcesstr) @@ -39,7 +38,6 @@ class GovernanceConstantTest { Assertions.assertEquals("version", taskresourceversionstr) Assertions.assertEquals("fileName", taskresourcefilenamestr) Assertions.assertTrue(500 == requestenginestatusbatchlimit.intValue()) - Assertions.assertEquals("resultsetIndex", resultsetindex) } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineCommandBuilder.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineCommandBuilder.scala index fade444fa0..4b5b1fab9e 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineCommandBuilder.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineCommandBuilder.scala @@ -77,7 +77,7 @@ class UnixProcessEngineCommandBuilder extends ShellProcessEngineCommandBuilder { newLine("linkis_engineconn_errorcode=$?") newLine("if [ $linkis_engineconn_errorcode -ne 0 ]") newLine("then") - newLine(" tail -1000 ${LOG_DIRS}/stderr") + newLine(" timeout 10 tail -1000 ${LOG_DIRS}/stderr") newLine(" exit $linkis_engineconn_errorcode") newLine("fi") } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineConnLaunch.scala 
b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineConnLaunch.scala index 91aa93e5fc..cc79e24d4f 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineConnLaunch.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/launch/ProcessEngineConnLaunch.scala @@ -166,8 +166,15 @@ trait ProcessEngineConnLaunch extends EngineConnLaunch with Logging { .findAvailPortByRange(GovernanceCommonConf.ENGINE_CONN_PORT_RANGE.getValue) .toString - var springConf = Map("server.port" -> engineConnPort, "spring.profiles.active" -> "engineconn") - + var springConf = + Map[String, String]("server.port" -> engineConnPort, "spring.profiles.active" -> "engineconn") + val properties = + PortUtils.readFromProperties(Configuration.getLinkisHome + "/conf/version.properties") + if (StringUtils.isNotBlank(properties.getProperty("version"))) { + springConf += ("eureka.instance.metadata-map.linkis.app.version" -> properties.getProperty( + "version" + )) + } request.creationDesc.properties.asScala.filter(_._1.startsWith("spring.")).foreach { case (k, v) => springConf = springConf + (k -> v) @@ -255,14 +262,15 @@ trait ProcessEngineConnLaunch extends EngineConnLaunch with Logging { } } + /** + * process exit code if process is null retur errorcode 10 + * @return + */ def processWaitFor: Int = { if (process != null) { process.waitFor } else { - throw new ECMCoreException( - CAN_NOT_GET_TERMINATED.getErrorCode, - CAN_NOT_GET_TERMINATED.getErrorDesc - ) + 10 } } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/report/NodeHealthReport.scala 
b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/report/NodeHealthReport.scala index 160025ed51..395c9258b8 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/report/NodeHealthReport.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/report/NodeHealthReport.scala @@ -34,9 +34,9 @@ trait NodeHealthReport { def getNodeMsg: String - def getUsedResource: Resource +// def getUsedResource: Resource - def setUsedResource(resource: Resource): Unit +// def setUsedResource(resource: Resource): Unit def getTotalResource: Resource diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/utils/PortUtils.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/utils/PortUtils.scala index 4612467193..8552020493 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/utils/PortUtils.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/src/main/scala/org/apache/linkis/ecm/core/utils/PortUtils.scala @@ -17,15 +17,16 @@ package org.apache.linkis.ecm.core.utils -import org.apache.linkis.common.utils.Utils +import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.commons.io.IOUtils import org.apache.commons.lang3.StringUtils -import java.io.IOException +import java.io.{BufferedReader, FileReader, IOException} import java.net.ServerSocket +import java.util.Properties -object PortUtils { +object PortUtils extends Logging { /** * portRange: '-' is the separator @@ -62,4 +63,23 @@ object PortUtils { 
Utils.tryFinally(socket.getLocalPort)(IOUtils.closeQuietly(socket)) } + def readFromProperties(propertiesFile: String): Properties = { + val properties: Properties = new Properties + var reader: BufferedReader = null; + try { + reader = new BufferedReader(new FileReader(propertiesFile)) + properties.load(reader) + } catch { + case e: Exception => + logger.warn(s"loading vsersion faild with path $propertiesFile error:$e") + } finally { + try if (reader != null) reader.close + catch { + case e: Exception => + logger.warn(s"try to close buffered reader with error:${e.getMessage}") + } + } + properties + } + } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java new file mode 100644 index 0000000000..24d9792fb6 --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/java/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java @@ -0,0 +1,286 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.ecm.server.service.impl; + +import org.apache.linkis.common.ServiceInstance; +import org.apache.linkis.common.utils.Utils; +import org.apache.linkis.ecm.server.conf.ECMConfiguration; +import org.apache.linkis.ecm.server.service.EngineConnKillService; +import org.apache.linkis.engineconn.common.conf.EngineConnConf; +import org.apache.linkis.governance.common.utils.GovernanceUtils; +import org.apache.linkis.manager.common.constant.AMConstant; +import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest; +import org.apache.linkis.manager.common.protocol.engine.EngineStopResponse; +import org.apache.linkis.manager.common.protocol.engine.EngineSuicideRequest; +import org.apache.linkis.rpc.Sender; +import org.apache.linkis.rpc.message.annotation.Receiver; + +import org.apache.commons.io.IOUtils; +import org.apache.commons.lang3.StringUtils; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileReader; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.ThreadPoolExecutor; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class DefaultEngineConnKillService implements EngineConnKillService { + + private static final Logger logger = LoggerFactory.getLogger(DefaultEngineConnKillService.class); + + private static final ThreadPoolExecutor ecYarnAppKillService = + Utils.newCachedThreadPool(10, "ECM-Kill-EC-Yarn-App", true); + + @Override + @Receiver + public EngineStopResponse dealEngineConnStop(EngineStopRequest engineStopRequest) { + logger.info("received EngineStopRequest " + engineStopRequest); + String pid = null; + if (AMConstant.PROCESS_MARK.equals(engineStopRequest.getIdentifierType()) + && 
StringUtils.isNotBlank(engineStopRequest.getIdentifier())) { + pid = engineStopRequest.getIdentifier(); + } + logger.info("dealEngineConnStop return pid: {}", pid); + EngineStopResponse response = new EngineStopResponse(); + if (StringUtils.isNotBlank(pid)) { + if (!killEngineConnByPid(pid, engineStopRequest.getServiceInstance())) { + response.setStopStatus(false); + response.setMsg( + "Kill engine " + engineStopRequest.getServiceInstance().toString() + " failed."); + } else { + response.setStopStatus(true); + response.setMsg( + "Kill engine " + engineStopRequest.getServiceInstance().toString() + " succeed."); + } + } else { + String processPort = engineStopRequest.getServiceInstance().getInstance().split(":")[1]; + logger.warn("Kill EC {} by port {}", engineStopRequest.getServiceInstance(), processPort); + if (!killEngineConnByPort(processPort, engineStopRequest.getServiceInstance())) { + response.setStopStatus(false); + response.setMsg( + "Kill engine " + engineStopRequest.getServiceInstance().toString() + " failed."); + } else { + response.setStopStatus(true); + response.setMsg( + "Kill engine " + engineStopRequest.getServiceInstance().toString() + " succeed."); + } + } + + // Requires default kill yarn appid + if (AMConstant.PROCESS_MARK.equals(engineStopRequest.getIdentifierType())) { + killYarnAppIdOfOneEc(engineStopRequest); + } + + if (!response.getStopStatus()) { + EngineSuicideRequest request = + new EngineSuicideRequest( + engineStopRequest.getServiceInstance(), engineStopRequest.getUser()); + try { + Sender.getSender(engineStopRequest.getServiceInstance()).send(request); + response.setStopStatus(true); + response.setMsg(response.getMsg() + " Now send suicide request to engine."); + } catch (Exception e) { + response.setMsg( + response.getMsg() + " Failed to send suicide request to engine, " + e.getMessage()); + } + } + return response; + } + + public void killYarnAppIdOfOneEc(EngineStopRequest engineStopRequest) { + String logDirSuffix = 
engineStopRequest.getLogDirSuffix(); + ServiceInstance serviceInstance = engineStopRequest.getServiceInstance(); + String engineType = engineStopRequest.getEngineType(); + String engineConnInstance = serviceInstance.toString(); + String engineLogDir; + if (logDirSuffix.startsWith(ECMConfiguration.ENGINECONN_ROOT_DIR())) { + engineLogDir = logDirSuffix; + } else { + engineLogDir = ECMConfiguration.ENGINECONN_ROOT_DIR() + File.separator + logDirSuffix; + } + logger.info( + "try to kill yarn app ids in the engine of: [{}] engineLogDir: [{}]", + engineConnInstance, + engineLogDir); + final String errEngineLogPath = engineLogDir.concat(File.separator).concat("yarnApp"); + logger.info( + "try to parse the yarn app id from the engine err log file path: [{}]", errEngineLogPath); + File file = new File(errEngineLogPath); + if (file.exists()) { + ecYarnAppKillService.execute( + () -> { + BufferedReader in = null; + try { + in = new BufferedReader(new FileReader(errEngineLogPath)); + String line; + String regex = getYarnAppRegexByEngineType(engineType); + if (StringUtils.isBlank(regex)) { + return; + } + Pattern pattern = Pattern.compile(regex); + List appIds = new ArrayList<>(); + while ((line = in.readLine()) != null) { + if (StringUtils.isNotBlank(line)) { + Matcher mApp = pattern.matcher(line); + if (mApp.find()) { + String candidate1 = mApp.group(mApp.groupCount()); + if (!appIds.contains(candidate1)) { + appIds.add(candidate1); + } + } + } + } + GovernanceUtils.killYarnJobApp(appIds); + logger.info("finished kill yarn app ids in the engine of ({}).", engineConnInstance); + } catch (IOException ioEx) { + if (ioEx instanceof FileNotFoundException) { + logger.error("the engine log file {} not found.", errEngineLogPath); + } else { + logger.error( + "the engine log file parse failed. 
the reason is {}", ioEx.getMessage()); + } + } finally { + IOUtils.closeQuietly(in); + } + }); + } + } + + private String getYarnAppRegexByEngineType(String engineType) { + if (StringUtils.isBlank(engineType)) { + return ""; + } + String regex; + switch (engineType) { + case "spark": + case "shell": + regex = EngineConnConf.SPARK_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX().getValue(); + break; + case "sqoop": + regex = EngineConnConf.SQOOP_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX().getValue(); + break; + case "flink": + case "hive": + regex = EngineConnConf.HIVE_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX().getValue(); + break; + default: + regex = ""; + } + return regex; + } + + private boolean killEngineConnByPid(String processId, ServiceInstance serviceInstance) { + logger.info("try to kill {} toString with pid({}).", serviceInstance.toString(), processId); + if (StringUtils.isNotBlank(processId)) { + if (ECMConfiguration.ECM_PROCESS_SCRIPT_KILL()) { + GovernanceUtils.killProcess(processId, serviceInstance.toString(), true); + } else { + killProcessByKillCmd(processId, serviceInstance.toString()); + } + return !isProcessAlive(processId); + } else { + logger.warn("cannot kill {} with empty pid.", serviceInstance); + return false; + } + } + + private boolean killEngineConnByPort(String port, ServiceInstance serviceInstance) { + logger.info("try to kill {} toString with port({}).", serviceInstance.toString(), port); + if (StringUtils.isNotBlank(port)) { + GovernanceUtils.killECProcessByPort(port, serviceInstance.toString(), true); + return !isProcessAliveByPort(port); + } else { + logger.warn("cannot kill {} with empty port.", serviceInstance); + return false; + } + } + + private boolean isProcessAlive(String pid) { + String findCmd = + "ps -ef | grep " + + pid + + " | grep EngineConnServer | awk '{print \"exists_\"$2}' | grep " + + pid + + "|| true"; + List cmdList = new ArrayList<>(); + cmdList.add("bash"); + cmdList.add("-c"); + cmdList.add(findCmd); + try { + String rs = 
Utils.exec(cmdList.toArray(new String[0]), 5000L); + return null != rs && rs.contains("exists_" + pid); + } catch (Exception e) { + logger.warn("Method isProcessAlive failed", e); + return false; + } + } + + private boolean isProcessAliveByPort(String port) { + String findCmd = + "ps -ef | grep server.port= " + + port + + " | grep EngineConnServer | awk -F \"server.port=\" '{print \"exists_\"$2}'"; + List cmdList = new ArrayList<>(); + cmdList.add("bash"); + cmdList.add("-c"); + cmdList.add(findCmd); + try { + String rs = Utils.exec(cmdList.toArray(new String[0]), 5000L); + return null != rs && rs.contains("exists_" + port); + } catch (Exception e) { + logger.warn("Method isProcessAlive failed", e); + return false; + } + } + + private void killProcessByKillCmd(String pid, String desc) { + String k15cmd = "sudo kill " + pid; + String k9cmd = "sudo kill -9 " + pid; + int tryNum = 0; + try { + while (isProcessAlive(pid) && tryNum <= 3) { + logger.info( + "{} still alive with pid({}), use shell command to kill it. try {}++", + desc, + pid, + tryNum++); + if (tryNum <= 3) { + Utils.exec(k15cmd.split(" "), 3000L); + } else { + logger.info( + "{} still alive with pid({}). try {}, use shell command to kill -9 it", + desc, + pid, + tryNum); + Utils.exec(k9cmd.split(" "), 3000L); + } + Thread.sleep(5000); + } + } catch (InterruptedException e) { + logger.error("Interrupted while killing engine {} with pid({})." 
+ desc, pid); + } + } +} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/listener/ECMReadyEvent.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/listener/ECMReadyEvent.scala index 6bcb6c3b8b..db4ccea7f9 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/listener/ECMReadyEvent.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/listener/ECMReadyEvent.scala @@ -19,22 +19,11 @@ package org.apache.linkis.ecm.server.listener import org.apache.linkis.ecm.core.engineconn.EngineConn import org.apache.linkis.ecm.core.listener.ECMEvent -import org.apache.linkis.governance.common.protocol.task.ResponseEngineConnPid -import org.apache.linkis.manager.common.entity.enumeration.NodeStatus -import org.apache.linkis.protocol.callback.{YarnAPPIdCallbackProtocol, YarnInfoCallbackProtocol} +import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest case class ECMReadyEvent(params: Array[String]) extends ECMEvent case class ECMClosedEvent() extends ECMEvent -case class EngineConnStatusChageEvent(from: NodeStatus, to: NodeStatus) extends ECMEvent - -case class YarnAppIdCallbackEvent(protocol: YarnAPPIdCallbackProtocol) extends ECMEvent - -case class YarnInfoCallbackEvent(protocol: YarnInfoCallbackProtocol) extends ECMEvent - -case class EngineConnPidCallbackEvent(protocol: ResponseEngineConnPid) extends ECMEvent - -case class EngineConnAddEvent(conn: EngineConn) extends ECMEvent - -case class EngineConnStatusChangeEvent(tickedId: String, updateStatus: NodeStatus) extends ECMEvent +case class EngineConnStopEvent(engineConn: EngineConn, engineStopRequest: EngineStopRequest) + extends ECMEvent diff --git 
a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.scala index bc856ba681..66327dadcf 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnLogOperator.scala @@ -17,15 +17,13 @@ package org.apache.linkis.ecm.server.operator -import org.apache.linkis.DataWorkCloudApplication import org.apache.linkis.common.conf.CommonVars import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.ecm.errorcode.EngineconnServerErrorCodeSummary._ import org.apache.linkis.ecm.server.conf.ECMConfiguration import org.apache.linkis.ecm.server.exception.ECMErrorException -import org.apache.linkis.ecm.server.service.{EngineConnListService, LocalDirsHandleService} +import org.apache.linkis.ecm.server.service.LocalDirsHandleService import org.apache.linkis.manager.common.operator.Operator -import org.apache.linkis.manager.common.protocol.em.ECMOperateRequest import org.apache.commons.io.IOUtils import org.apache.commons.io.input.ReversedLinesFileReader @@ -37,14 +35,10 @@ import java.text.MessageFormat import java.util import java.util.Collections -import scala.collection.JavaConverters.asScalaBufferConverter import scala.util.matching.Regex class EngineConnLogOperator extends Operator with Logging { - private var engineConnListService: EngineConnListService = _ - private var localDirsHandleService: LocalDirsHandleService = _ - override def getNames: Array[String] = Array(EngineConnLogOperator.OPERATOR_NAME) 
override def apply(implicit parameters: Map[String, Any]): Map[String, Any] = { @@ -95,10 +89,7 @@ class EngineConnLogOperator extends Operator with Logging { var readLine, skippedLine, lineNum = 0 var rowIgnore = false var ignoreLine = 0 - val linePattern = Option(EngineConnLogOperator.MULTILINE_PATTERN.getValue) match { - case Some(pattern) => pattern.r - case _ => null - } + val linePattern = getLinePattern val maxMultiline = EngineConnLogOperator.MULTILINE_MAX.getValue Utils.tryFinally { var line = randomAndReversedReadLine() @@ -155,59 +146,20 @@ class EngineConnLogOperator extends Operator with Logging { logPath } + protected def getLinePattern: Regex = { + Option(EngineConnLogOperator.MULTILINE_PATTERN.getValue) match { + case Some(pattern) => pattern.r + case _ => null + } + } + protected def getEngineConnInfo(implicit parameters: Map[String, Any] ): (String, String, String) = { - if (engineConnListService == null) { - engineConnListService = - DataWorkCloudApplication.getApplicationContext.getBean(classOf[EngineConnListService]) - localDirsHandleService = - DataWorkCloudApplication.getApplicationContext.getBean(classOf[LocalDirsHandleService]) - } val logDIrSuffix = getAs("logDirSuffix", "") - val (engineConnLogDir, engineConnInstance, ticketId) = - if (StringUtils.isNotBlank(logDIrSuffix)) { - val ecLogPath = ECMConfiguration.ENGINECONN_ROOT_DIR + File.separator + logDIrSuffix - val ticketId = getAs("ticketId", "") - (ecLogPath, "", ticketId) - } else { - val engineConnInstance = getAs( - ECMOperateRequest.ENGINE_CONN_INSTANCE_KEY, - getAs[String]("engineConnInstance", null) - ) - Option(engineConnInstance) - .flatMap { instance => - engineConnListService.getEngineConns.asScala.find( - _.getServiceInstance.getInstance == instance - ) - } - .map(engineConn => - ( - engineConn.getEngineConnManagerEnv.engineConnLogDirs, - engineConnInstance, - engineConn.getTickedId - ) - ) - .getOrElse { - val ticketId = getAs("ticketId", "") - if 
(StringUtils.isBlank(ticketId)) { - throw new ECMErrorException( - BOTH_NOT_EXISTS.getErrorCode, - s"the parameters of ${ECMOperateRequest.ENGINE_CONN_INSTANCE_KEY}, engineConnInstance and ticketId are both not exists." - ) - } - val logDir = engineConnListService - .getEngineConn(ticketId) - .map(_.getEngineConnManagerEnv.engineConnLogDirs) - .getOrElse { - val creator = getAsThrow[String]("creator") - val engineConnType = getAsThrow[String]("engineConnType") - localDirsHandleService.getEngineConnLogDir(creator, ticketId, engineConnType) - } - (logDir, engineConnInstance, ticketId) - } - } - (ticketId, engineConnInstance, engineConnLogDir) + val ecLogPath = ECMConfiguration.ENGINECONN_ROOT_DIR + File.separator + logDIrSuffix + val ticketId = getAs("ticketId", "") + (ticketId, "", ecLogPath) } private def includeLine( diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnYarnLogOperator.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnYarnLogOperator.scala index e00d16b519..36e7ddfc5f 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnYarnLogOperator.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/operator/EngineConnYarnLogOperator.scala @@ -17,20 +17,36 @@ package org.apache.linkis.ecm.server.operator -import org.apache.linkis.common.exception.LinkisCommonErrorException +import org.apache.linkis.common.conf.CommonVars +import org.apache.linkis.common.io.FsPath import org.apache.linkis.common.utils.Utils -import org.apache.linkis.ecm.core.conf.ECMErrorCode import org.apache.linkis.ecm.errorcode.EngineconnServerErrorCodeSummary._ import 
org.apache.linkis.ecm.server.exception.ECMErrorException +import org.apache.linkis.storage.FSFactory +import org.apache.linkis.storage.fs.FileSystem +import org.apache.linkis.storage.utils.StorageUtils + +import org.apache.commons.lang3.StringUtils import java.io.File import java.text.MessageFormat -import java.util.concurrent.TimeUnit +import java.util +import java.util.concurrent.{Callable, ConcurrentHashMap, ExecutorService, Future, TimeUnit} import scala.collection.JavaConverters._ +import scala.util.matching.Regex class EngineConnYarnLogOperator extends EngineConnLogOperator { + private implicit val fs: FileSystem = + FSFactory.getFs(StorageUtils.FILE).asInstanceOf[FileSystem] + + /** + * Yarn log fetchers + */ + private def yarnLogFetchers: ConcurrentHashMap[String, Future[String]] = + new ConcurrentHashMap[String, Future[String]]() + override def getNames: Array[String] = Array(EngineConnYarnLogOperator.OPERATOR_NAME) override def apply(implicit parameters: Map[String, Any]): Map[String, Any] = { @@ -38,75 +54,118 @@ class EngineConnYarnLogOperator extends EngineConnLogOperator { Utils.tryFinally { result = super.apply(parameters) result - } { - result.get("logPath") match { - case Some(path: String) => - val logFile = new File(path) - if (logFile.exists() && logFile.getName.startsWith(".")) { - // If is a temporary file, drop it - logger.info(s"Delete the temporary yarn log file: [$path]") - if (!logFile.delete()) { - logger.warn(s"Fail to delete the temporary yarn log file: [$path]") - } - } - } - } + } {} } override def getLogPath(implicit parameters: Map[String, Any]): File = { + val applicationId = getAsThrow[String]("yarnApplicationId") val (ticketId, engineConnInstance, engineConnLogDir) = getEngineConnInfo(parameters) - val rootLogDir = new File(engineConnLogDir) - if (!rootLogDir.exists() || !rootLogDir.isDirectory) { + val rootLogPath = EngineConnYarnLogOperator.YARN_LOG_STORAGE_PATH.getValue match { + case storePath if 
StringUtils.isNotBlank(storePath) => + val logPath = new FsPath(StorageUtils.FILE_SCHEMA + storePath + "/" + applicationId) + // Invoke to create directory + fs.mkdir(logPath) + // 777 permission + fs.setPermission(logPath, "rwxrwxrwx") + logPath + case _ => new FsPath(StorageUtils.FILE_SCHEMA + engineConnLogDir) + } + if (!fs.exists(rootLogPath) || !rootLogPath.toFile.isDirectory) { throw new ECMErrorException( LOG_IS_NOT_EXISTS.getErrorCode, - MessageFormat.format(LOG_IS_NOT_EXISTS.getErrorDesc, rootLogDir) + MessageFormat.format(LOG_IS_NOT_EXISTS.getErrorDesc, rootLogPath.getPath) ) } val creator = getAsThrow[String]("creator") - val applicationId = getAsThrow[String]("yarnApplicationId") - var logPath = new File(engineConnLogDir, "yarn_" + applicationId) - if (!logPath.exists()) { - val tempLogFile = - s".yarn_${applicationId}_${System.currentTimeMillis()}_${Thread.currentThread().getId}" - Utils.tryCatch { - var command = s"yarn logs -applicationId $applicationId >> $rootLogDir/$tempLogFile" - logger.info(s"Fetch yarn logs to temporary file: [$command]") - val processBuilder = new ProcessBuilder(sudoCommands(creator, command): _*) - processBuilder.environment.putAll(sys.env.asJava) - processBuilder.redirectErrorStream(false) - val process = processBuilder.start() - val waitFor = process.waitFor(5, TimeUnit.SECONDS) - logger.trace(s"waitFor: ${waitFor}, result: ${process.exitValue()}") - if (waitFor && process.waitFor() == 0) { - command = s"mv $rootLogDir/$tempLogFile $rootLogDir/yarn_$applicationId" - logger.info(s"Move and save yarn logs: [$command]") - Utils.exec(sudoCommands(creator, command)) - } else { - logPath = new File(engineConnLogDir, tempLogFile) - if (!logPath.exists()) { - throw new LinkisCommonErrorException( - -1, - s"Fetch yarn logs timeout, log aggregation has not completed or is not enabled" - ) - } + var logPath = new FsPath( + StorageUtils.FILE_SCHEMA + rootLogPath.getPath + "/yarn_" + applicationId + ) + if (!fs.exists(logPath)) { + val 
fetcher = yarnLogFetchers.computeIfAbsent( + applicationId, + new util.function.Function[String, Future[String]] { + override def apply(v1: String): Future[String] = + requestToFetchYarnLogs(creator, applicationId, rootLogPath.getPath) } - } { case e: Exception => - throw new LinkisCommonErrorException( - -1, - s"Fail to fetch yarn logs application: $applicationId, message: ${e.getMessage}" - ) + ) + // Just wait 5 seconds + Option(fetcher.get(5, TimeUnit.SECONDS)) match { + case Some(path) => logPath = new FsPath(StorageUtils.FILE_SCHEMA + path) + case _ => } + } - if (!logPath.exists() || !logPath.isFile) { + if (!fs.exists(logPath) || logPath.toFile.isDirectory) { throw new ECMErrorException( LOGFILE_IS_NOT_EXISTS.getErrorCode, - MessageFormat.format(LOGFILE_IS_NOT_EXISTS.getErrorDesc, logPath) + MessageFormat.format(LOGFILE_IS_NOT_EXISTS.getErrorDesc, logPath.getPath) ) } logger.info( s"Try to fetch EngineConn(id: $ticketId, instance: $engineConnInstance) yarn logs from ${logPath.getPath} in application id: $applicationId" ) - logPath + logPath.toFile + } + + /** + * Not support line pattern in yarn log + * @return + */ + override protected def getLinePattern: Regex = null + + /** + * Request the log fetcher + * + * @param creator + * creator + * @param applicationId + * application id + * @param logPath + * log path + * @return + */ + private def requestToFetchYarnLogs( + creator: String, + applicationId: String, + yarnLogDir: String + ): Future[String] = { + EngineConnYarnLogOperator.YARN_LOG_FETCH_SCHEDULER.submit(new Callable[String] { + override def call(): String = { + val logPath = new FsPath(StorageUtils.FILE_SCHEMA + yarnLogDir + "/yarn_" + applicationId) + if (!fs.exists(logPath)) { + val tempLogFile = + s".yarn_${applicationId}_${System.currentTimeMillis()}_${Thread.currentThread().getId}" + Utils.tryCatch { + var command = + s"yarn logs -applicationId $applicationId >> $yarnLogDir/$tempLogFile" + logger.info(s"Fetch yarn logs to temporary file: 
[$command]") + val processBuilder = new ProcessBuilder(sudoCommands(creator, command): _*) + processBuilder.environment.putAll(sys.env.asJava) + processBuilder.redirectErrorStream(false) + val process = processBuilder.start() + val exitCode = process.waitFor() + logger.trace(s"Finished fetching yarn logs to temporary file, result: ${exitCode}") + if (exitCode == 0) { + command = s"mv $yarnLogDir/$tempLogFile $yarnLogDir/yarn_$applicationId" + logger.info(s"Move and save yarn logs(${applicationId}): [$command]") + Utils.exec(sudoCommands(creator, command)) + } + } { e: Throwable => + logger.error( + s"Fail to fetch yarn logs application: $applicationId, message: ${e.getMessage}" + ) + } + val tmpFile = new File(yarnLogDir, tempLogFile) + if (tmpFile.exists()) { + logger.info(s"Delete temporary file: [${tempLogFile}] in yarn logs fetcher") + tmpFile.delete() + } + } + // Remove future + yarnLogFetchers.remove(applicationId) + if (fs.exists(logPath)) logPath.getPath else null + } + }) } private def sudoCommands(creator: String, command: String): Array[String] = { @@ -121,4 +180,15 @@ class EngineConnYarnLogOperator extends EngineConnLogOperator { object EngineConnYarnLogOperator { val OPERATOR_NAME = "engineConnYarnLog" + + // Specifies the path to store the yarn logs + val YARN_LOG_STORAGE_PATH: CommonVars[String] = + CommonVars("linkis.engineconn.log.yarn.storage-path", "") + + val YARN_LOG_FETCH_THREAD: CommonVars[Int] = + CommonVars("linkis.engineconn.log.yarn.fetch.thread-num", 5) + + val YARN_LOG_FETCH_SCHEDULER: ExecutorService = + Utils.newFixedThreadPool(YARN_LOG_FETCH_THREAD.getValue + 1, "yarn_logs_fetch", false) + } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/report/DefaultECMHealthReport.scala 
b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/report/DefaultECMHealthReport.scala index 6fdf8b3488..150d0be6ba 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/report/DefaultECMHealthReport.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/report/DefaultECMHealthReport.scala @@ -69,9 +69,9 @@ class DefaultECMHealthReport extends ECMHealthReport { override def getNodeMsg: String = nodeMsg - override def getUsedResource: Resource = usedResource +// override def getUsedResource: Resource = usedResource - override def setUsedResource(usedResource: Resource): Unit = this.usedResource = usedResource +// override def setUsedResource(usedResource: Resource): Unit = this.usedResource = usedResource override def getTotalResource: Resource = totalResource diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnListService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnListService.scala deleted file mode 100644 index d36d35cceb..0000000000 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnListService.scala +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. 
- * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.ecm.server.service - -import org.apache.linkis.ecm.core.engineconn.EngineConn -import org.apache.linkis.ecm.core.launch.EngineConnLaunchRunner -import org.apache.linkis.manager.common.entity.resource.Resource - -import java.util - -/** - * The enginelistservice interface manages the interface started by the engine The most important - * submit method is to put the thread that starts the engine into the thread pool to start - * EngineListService接口管理引擎启动的接口 最重要的submit方法是将启动引擎的线程放入到线程池中进行启动 - */ -trait EngineConnListService { - - def init(): Unit - - def getEngineConn(engineConnId: String): Option[EngineConn] - - def getEngineConns: util.List[EngineConn] - - def addEngineConn(engineConn: EngineConn): Unit - - def killEngineConn(engineConnId: String): Unit - - def getUsedResources: Resource - - def submit(runner: EngineConnLaunchRunner): Option[EngineConn] - -} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/AbstractEngineConnLaunchService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/AbstractEngineConnLaunchService.scala index 6ac10d1e14..49e75fb6b8 100644 --- 
a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/AbstractEngineConnLaunchService.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/AbstractEngineConnLaunchService.scala @@ -25,22 +25,25 @@ import org.apache.linkis.ecm.server.LinkisECMApplication import org.apache.linkis.ecm.server.conf.ECMConfiguration._ import org.apache.linkis.ecm.server.engineConn.DefaultEngineConn import org.apache.linkis.ecm.server.hook.ECMHook -import org.apache.linkis.ecm.server.listener.{EngineConnAddEvent, EngineConnStatusChangeEvent} +import org.apache.linkis.ecm.server.listener.EngineConnStopEvent import org.apache.linkis.ecm.server.service.{EngineConnLaunchService, ResourceLocalizationService} import org.apache.linkis.ecm.server.util.ECMUtils import org.apache.linkis.governance.common.conf.GovernanceCommonConf -import org.apache.linkis.governance.common.utils.JobUtils +import org.apache.linkis.governance.common.utils.{ECPathUtils, JobUtils, LoggerUtils} +import org.apache.linkis.manager.common.constant.AMConstant import org.apache.linkis.manager.common.entity.enumeration.NodeStatus -import org.apache.linkis.manager.common.entity.enumeration.NodeStatus.Failed import org.apache.linkis.manager.common.entity.node.{AMEngineNode, EngineNode} -import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallbackToAM +import org.apache.linkis.manager.common.protocol.engine.{ + EngineConnStatusCallbackToAM, + EngineStopRequest +} import org.apache.linkis.manager.engineplugin.common.launch.entity.EngineConnLaunchRequest +import org.apache.linkis.manager.label.utils.LabelUtil import org.apache.linkis.rpc.Sender import org.apache.commons.lang3.exception.ExceptionUtils -import scala.concurrent.{ExecutionContextExecutorService, Future} -import scala.util.{Failure, Success} +import 
scala.concurrent.ExecutionContextExecutorService abstract class AbstractEngineConnLaunchService extends EngineConnLaunchService with Logging { @@ -61,8 +64,9 @@ abstract class AbstractEngineConnLaunchService extends EngineConnLaunchService w } override def launchEngineConn(request: EngineConnLaunchRequest, duration: Long): EngineNode = { - // 1.创建engineConn和runner,launch 并设置基础属性 + // create engineConn/runner/launch val taskId = JobUtils.getJobIdFromStringMap(request.creationDesc.properties) + LoggerUtils.setJobIdMDC(taskId) logger.info("TaskId: {} try to launch a new EngineConn with {}.", taskId: Any, request: Any) val conn = createEngineConn val runner = createEngineConnLaunchRunner @@ -77,11 +81,9 @@ abstract class AbstractEngineConnLaunchService extends EngineConnLaunchService w conn.setStatus(NodeStatus.Starting) conn.setEngineConnInfo(new EngineConnInfo) conn.setEngineConnManagerEnv(launch.getEngineConnManagerEnv()) - // 2.资源本地化,并且设置ecm的env环境信息 + // get ec Resource getResourceLocalizationServie.handleInitEngineConnResources(request, conn) - // 3.添加到list - LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll(EngineConnAddEvent(conn)) - // 4.run + // start ec Utils.tryCatch { beforeLaunch(request, conn, duration) runner.run() @@ -95,44 +97,12 @@ abstract class AbstractEngineConnLaunchService extends EngineConnLaunchService w case _ => } afterLaunch(request, conn, duration) - - val future = Future { - logger.info( - "TaskId: {} with request {} wait engineConn {} start", - Array(taskId, request, conn.getServiceInstance): _* - ) - waitEngineConnStart(request, conn, duration) - } - - future onComplete { - case Failure(t) => - logger.error( - "TaskId: {} init {} failed. 
{} with request {}", - Array( - taskId, - conn.getServiceInstance, - conn.getEngineConnLaunchRunner.getEngineConnLaunch - .getEngineConnManagerEnv() - .engineConnWorkDir, - request - ): _* - ) - LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll( - EngineConnStatusChangeEvent(conn.getTickedId, Failed) - ) - case Success(_) => - logger.info( - "TaskId: {} init {} succeed. {} with request {}", - Array( - taskId, - conn.getServiceInstance, - conn.getEngineConnLaunchRunner.getEngineConnLaunch - .getEngineConnManagerEnv() - .engineConnWorkDir, - request - ): _* - ) - } + logger.info( + "TaskId: {} with request {} wait engineConn {} start", + Array(taskId, request, conn.getServiceInstance): _* + ) + // start ec monitor thread + startEngineConnMonitorStart(request, conn) } { t => logger.error( "TaskId: {} init {} failed, {}, with request {} now stop and delete it. message: {}", @@ -147,31 +117,43 @@ abstract class AbstractEngineConnLaunchService extends EngineConnLaunchService w t ): _* ) - conn.getEngineConnLaunchRunner.stop() Sender .getSender(MANAGER_SERVICE_NAME) .send( EngineConnStatusCallbackToAM( conn.getServiceInstance, - NodeStatus.ShuttingDown, + NodeStatus.Failed, " wait init failed , reason " + ExceptionUtils.getRootCauseMessage(t) ) ) - LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll( - EngineConnStatusChangeEvent(conn.getTickedId, Failed) + conn.setStatus(NodeStatus.Failed) + val engineType = LabelUtil.getEngineType(request.labels) + val logPath = Utils.tryCatch(conn.getEngineConnManagerEnv.engineConnLogDirs) { t => + ECPathUtils.getECWOrkDirPathSuffix(request.user, request.ticketId, engineType) + } + val engineStopRequest = new EngineStopRequest + engineStopRequest.setEngineType(engineType) + engineStopRequest.setUser(request.user) + engineStopRequest.setIdentifier(conn.getPid) + engineStopRequest.setIdentifierType(AMConstant.PROCESS_MARK) + engineStopRequest.setLogDirSuffix(logPath) + 
engineStopRequest.setServiceInstance(conn.getServiceInstance) + LinkisECMApplication.getContext.getECMAsyncListenerBus.post( + EngineConnStopEvent(conn, engineStopRequest) ) + LoggerUtils.removeJobIdMDC() throw t } + LoggerUtils.removeJobIdMDC() val engineNode = new AMEngineNode() engineNode.setLabels(conn.getLabels) - engineNode.setServiceInstance(conn.getServiceInstance) engineNode.setOwner(request.user) - engineNode.setMark("process") + engineNode.setMark(AMConstant.PROCESS_MARK) engineNode } - def waitEngineConnStart(request: EngineConnLaunchRequest, conn: EngineConn, duration: Long): Unit + def startEngineConnMonitorStart(request: EngineConnLaunchRequest, conn: EngineConn): Unit def createEngineConn: EngineConn = new DefaultEngineConn diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMHealthService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMHealthService.scala index 132749cbec..be879f6877 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMHealthService.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMHealthService.scala @@ -25,7 +25,7 @@ import org.apache.linkis.ecm.server.conf.ECMConfiguration import org.apache.linkis.ecm.server.conf.ECMConfiguration._ import org.apache.linkis.ecm.server.listener.{ECMClosedEvent, ECMReadyEvent} import org.apache.linkis.ecm.server.report.DefaultECMHealthReport -import org.apache.linkis.ecm.server.service.{ECMHealthService, EngineConnListService} +import org.apache.linkis.ecm.server.service.ECMHealthService import org.apache.linkis.ecm.server.util.ECMUtils 
import org.apache.linkis.manager.common.entity.enumeration.{NodeHealthy, NodeStatus} import org.apache.linkis.manager.common.entity.metrics.{NodeHealthyInfo, NodeOverLoadInfo} @@ -38,8 +38,6 @@ import org.apache.linkis.manager.common.protocol.node.{ import org.apache.linkis.rpc.Sender import org.apache.linkis.rpc.message.annotation.Receiver -import org.springframework.beans.factory.annotation.Autowired - import java.util.Date import java.util.concurrent.TimeUnit @@ -79,9 +77,6 @@ class DefaultECMHealthService extends ECMHealthService with ECMEventListener { TimeUnit.SECONDS ) - @Autowired - private var engineConnListService: EngineConnListService = _ - override def getLastEMHealthReport: ECMHealthReport = { val report = new DefaultECMHealthReport report.setNodeId(LinkisECMApplication.getECMServiceInstance.toString) @@ -89,7 +84,6 @@ class DefaultECMHealthService extends ECMHealthService with ECMEventListener { // todo report right metrics report.setTotalResource(maxResource) report.setProtectedResource(minResource) - report.setUsedResource(engineConnListService.getUsedResources) report.setReportTime(new Date().getTime) report.setRunningEngineConns( LinkisECMApplication.getContext.getECMMetrics.getRunningEngineConns @@ -117,7 +111,6 @@ class DefaultECMHealthService extends ECMHealthService with ECMEventListener { // todo report latest engineconn metrics resource.setMaxResource(maxResource) resource.setMinResource(minResource) - resource.setUsedResource(report.getUsedResource) heartbeat.setNodeResource(resource) heartbeat.setHeartBeatMsg("") val nodeHealthyInfo = new NodeHealthyInfo diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMRegisterService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMRegisterService.scala index 
eb9206c963..4c7807dad1 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMRegisterService.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultECMRegisterService.scala @@ -37,7 +37,9 @@ import java.util.Collections class DefaultECMRegisterService extends ECMRegisterService with ECMEventListener with Logging { - private implicit def readyEvent2RegisterECMRequest(event: ECMReadyEvent): RegisterEMRequest = { + private var unRegisterFlag = false + + private def readyEvent2RegisterECMRequest(event: ECMReadyEvent): RegisterEMRequest = { val request = new RegisterEMRequest val instance = Sender.getThisServiceInstance request.setUser(Utils.getJvmUser) @@ -50,14 +52,11 @@ class DefaultECMRegisterService extends ECMRegisterService with ECMEventListener private def getLabelsFromArgs(params: Array[String]): util.Map[String, AnyRef] = { import scala.collection.JavaConverters._ - val labelRegex = """label\.(.+)\.(.+)=(.+)""".r val labels = new util.HashMap[String, AnyRef]() - // TODO: magic labels.asScala += LabelKeyConstant.SERVER_ALIAS_KEY -> Collections.singletonMap( "alias", ENGINE_CONN_MANAGER_SPRING_NAME ) - // TODO: group by key labels } @@ -81,12 +80,12 @@ class DefaultECMRegisterService extends ECMRegisterService with ECMEventListener } override def onEvent(event: ECMEvent): Unit = event match { - case event: ECMReadyEvent => registerECM(event) - case event: ECMClosedEvent => unRegisterECM(event) + case event: ECMReadyEvent => registerECM(readyEvent2RegisterECMRequest(event)) + case event: ECMClosedEvent => unRegisterECM(closeEvent2StopECMRequest(event)) case _ => } - private implicit def closeEvent2StopECMRequest(event: ECMClosedEvent): StopEMRequest = { + private def closeEvent2StopECMRequest(event: ECMClosedEvent): StopEMRequest = { val 
request = new StopEMRequest val instance = Sender.getThisServiceInstance request.setUser(Utils.getJvmUser) @@ -114,7 +113,10 @@ class DefaultECMRegisterService extends ECMRegisterService with ECMEventListener override def unRegisterECM(request: StopEMRequest): Unit = { logger.info("start unRegister ecm") - Sender.getSender(MANAGER_SERVICE_NAME).send(request) + if (!unRegisterFlag) { + Sender.getSender(MANAGER_SERVICE_NAME).send(request) + } + unRegisterFlag = true } } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java deleted file mode 100644 index 4f593736d9..0000000000 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnKillService.java +++ /dev/null @@ -1,242 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.ecm.server.service.impl; - -import org.apache.commons.io.IOUtils; -import org.apache.linkis.common.ServiceInstance; -import org.apache.linkis.common.utils.Utils; -import org.apache.linkis.ecm.core.engineconn.EngineConn; -import org.apache.linkis.ecm.server.conf.ECMConfiguration; -import org.apache.linkis.ecm.server.service.EngineConnKillService; -import org.apache.linkis.ecm.server.service.EngineConnListService; -import org.apache.linkis.engineconn.common.conf.EngineConnConf; -import org.apache.linkis.governance.common.utils.GovernanceUtils; -import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest; -import org.apache.linkis.manager.common.protocol.engine.EngineStopResponse; -import org.apache.linkis.manager.common.protocol.engine.EngineSuicideRequest; -import org.apache.linkis.manager.label.entity.Label; -import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; -import org.apache.linkis.rpc.message.annotation.Receiver; -import org.apache.linkis.rpc.Sender; -import org.apache.commons.lang3.StringUtils; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.BufferedReader; -import java.io.File; -import java.io.FileNotFoundException; -import java.io.FileReader; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Optional; -import java.util.concurrent.ThreadPoolExecutor; -import java.util.regex.Matcher; -import java.util.regex.Pattern; - -public class DefaultEngineConnKillService implements EngineConnKillService { - - private static final Logger logger = LoggerFactory.getLogger(DefaultEngineConnKillService.class); - - private EngineConnListService engineConnListService; - - public void setEngineConnListService(EngineConnListService engineConnListService) { - this.engineConnListService = engineConnListService; - } - - private static final ThreadPoolExecutor ecYarnAppKillService = Utils.newCachedThreadPool(10, "ECM-Kill-EC-Yarn-App", 
true); - - @Override - @Receiver - public EngineStopResponse dealEngineConnStop(EngineStopRequest engineStopRequest) { - logger.info("received EngineStopRequest " + engineStopRequest); - EngineConn engineConn = getEngineConnByServiceInstance(engineStopRequest.getServiceInstance()); - EngineStopResponse response = new EngineStopResponse(); - if (null != engineConn) { - if(!killEngineConnByPid(engineConn)) { - response.setStopStatus(false); - response.setMsg("Kill engine " + engineConn.getServiceInstance().toString() + " failed."); - } else { - response.setStopStatus(true); - response.setMsg("Kill engine " + engineConn.getServiceInstance().toString() + " succeed."); - } - killYarnAppIdOfOneEc(engineConn); - } else { - logger.warn("Cannot find engineconn : " + engineStopRequest.getServiceInstance().toString() + " in this engineConnManager engineConn list, cannot kill."); - response.setStopStatus(true); - response.setMsg("EngineConn " + engineStopRequest.getServiceInstance().toString() + " was not found in this engineConnManager."); - } - if (!response.getStopStatus()) { - EngineSuicideRequest request = new EngineSuicideRequest(engineStopRequest.getServiceInstance(), engineStopRequest.getUser()); - try { - Sender.getSender(engineStopRequest.getServiceInstance()).send(request); - response.setStopStatus(true); - response.setMsg(response.getMsg() + " Now send suicide request to engine."); - } catch (Exception e) { - response.setMsg(response.getMsg() + " Sended suicide request to engine error, " + e.getMessage()); - } - } - return response; - } - - public void killYarnAppIdOfOneEc(EngineConn engineConn) { - String engineConnInstance = engineConn.getServiceInstance().toString(); - logger.info("try to kill yarn app ids in the engine of ({}).", engineConnInstance); - String engineLogDir = engineConn.getEngineConnManagerEnv().engineConnLogDirs(); - final String errEngineLogPath = engineLogDir.concat(File.separator).concat("yarnApp.log"); - logger.info("try to parse the yarn 
app id from the engine err log file path: {}", errEngineLogPath); - File file = new File(errEngineLogPath); - if (file.exists()) - { - ecYarnAppKillService.execute(() -> { - BufferedReader in = null; - try { - in = new BufferedReader(new FileReader(errEngineLogPath)); - String line; - String regex = getYarnAppRegexByEngineType(engineConn); - if (StringUtils.isBlank(regex)) { - return; - } - Pattern pattern = Pattern.compile(regex); - List appIds = new ArrayList<>(); - while ((line = in.readLine()) != null) { - if (StringUtils.isNotBlank(line)) { - Matcher mApp = pattern.matcher(line); - if (mApp.find()) { - String candidate1 = mApp.group(mApp.groupCount()); - if (!appIds.contains(candidate1)) { - appIds.add(candidate1); - } - } - } - } - GovernanceUtils.killYarnJobApp(appIds); - logger.info("finished kill yarn app ids in the engine of ({}).", engineConnInstance); - } catch (IOException ioEx) { - if (ioEx instanceof FileNotFoundException) { - logger.error("the engine log file {} not found.", errEngineLogPath); - } else { - logger.error("the engine log file parse failed. 
the reason is {}", ioEx.getMessage()); - } - } finally { - IOUtils.closeQuietly(in); - } - }); - } - } - - private String getYarnAppRegexByEngineType(EngineConn engineConn) { - List> labels = engineConn.getLabels(); - String engineType = ""; - if (labels != null && !labels.isEmpty()) { - Optional labelOptional = labels.stream().filter(label -> label instanceof EngineTypeLabel) - .map(label -> (EngineTypeLabel) label).findFirst(); - if (labelOptional.isPresent()) { - EngineTypeLabel engineTypeLabel = labelOptional.get(); - engineType = engineTypeLabel.getEngineType(); - } - } - if (StringUtils.isBlank(engineType)) { - return ""; - } - String regex; - switch (engineType) { - case "spark": - case "shell": - regex = EngineConnConf.SPARK_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX().getValue(); - break; - case "sqoop": - regex = EngineConnConf.SQOOP_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX().getValue(); - break; - case "hive": - regex = EngineConnConf.HIVE_ENGINE_CONN_YARN_APP_ID_PARSE_REGEX().getValue(); - break; - default: - regex = ""; - } - return regex; - } - - private EngineConn getEngineConnByServiceInstance(ServiceInstance serviceInstance) { - if (null == serviceInstance) { - return null; - } - List engineConnList = engineConnListService.getEngineConns(); - for (EngineConn engineConn : engineConnList) { - if (null != engineConn && serviceInstance.equals(engineConn.getServiceInstance())) { - return engineConn; - } - } - return null; - } - - private boolean killEngineConnByPid(EngineConn engineConn) { - logger.info("try to kill {} toString with pid({}).", engineConn.getServiceInstance().toString(), engineConn.getPid()); - if (StringUtils.isNotBlank(engineConn.getPid())) { - if (ECMConfiguration.ECM_PROCESS_SCRIPT_KILL()) { - GovernanceUtils.killProcess(engineConn.getPid(), engineConn.getServiceInstance().toString(), true); - } else { - killProcessByKillCmd(engineConn.getPid(), engineConn.getServiceInstance().toString()); - } - if (isProcessAlive(engineConn.getPid())) { - 
return false; - } else { - return true; - } - } else { - logger.warn("cannot kill {} with empty pid.", engineConn.getServiceInstance().toString()); - return false; - } - } - - private boolean isProcessAlive(String pid) { - String findCmd = "ps -ef | grep " + pid + " | grep EngineConnServer | awk '{print \"exists_\"$2}' | grep " + pid; - List cmdList = new ArrayList<>(); - cmdList.add("bash"); - cmdList.add("-c"); - cmdList.add(findCmd); - try { - String rs = Utils.exec(cmdList.toArray(new String[0]), 5000L); - return null != rs && rs.contains("exists_" + pid); - } catch (Exception e) { - // todo when thread catch exception , it should not be return false - logger.warn("Method isProcessAlive failed, " + e.getMessage()); - return false; - } - } - - private void killProcessByKillCmd(String pid, String desc ) { - String k15cmd = "sudo kill " + pid; - String k9cmd = "sudo kill -9 " + pid; - int tryNum = 0; - try { - while (isProcessAlive(pid) && tryNum <= 3) { - logger.info("{} still alive with pid({}), use shell command to kill it. try {}++", desc, pid, tryNum++); - if (tryNum <= 3) { - Utils.exec(k15cmd.split(" "), 3000L); - } else { - logger.info("{} still alive with pid({}). try {}, use shell command to kill -9 it", desc, pid, tryNum); - Utils.exec(k9cmd.split(" "), 3000L); - } - Thread.sleep(5000); - } - } catch (InterruptedException e) { - logger.error("Interrupted while killing engine {} with pid({})." 
+ desc, pid); - } - } -} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnListService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnListService.scala deleted file mode 100644 index 4b9a59b4d7..0000000000 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnListService.scala +++ /dev/null @@ -1,192 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.ecm.server.service.impl - -import org.apache.linkis.DataWorkCloudApplication -import org.apache.linkis.common.utils.{Logging, Utils} -import org.apache.linkis.ecm.core.engineconn.{EngineConn, YarnEngineConn} -import org.apache.linkis.ecm.core.launch.EngineConnLaunchRunner -import org.apache.linkis.ecm.core.listener.{ECMEvent, ECMEventListener} -import org.apache.linkis.ecm.server.LinkisECMApplication -import org.apache.linkis.ecm.server.converter.ECMEngineConverter -import org.apache.linkis.ecm.server.listener._ -import org.apache.linkis.ecm.server.service.EngineConnListService -import org.apache.linkis.manager.common.entity.enumeration.NodeStatus -import org.apache.linkis.manager.common.entity.resource.{Resource, ResourceType} -import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest - -import org.apache.commons.lang3.StringUtils - -import java.util -import java.util.concurrent.ConcurrentHashMap - -import scala.collection.JavaConverters._ - -import com.google.common.collect.Interners - -class DefaultEngineConnListService - extends EngineConnListService - with ECMEventListener - with Logging { - - /** - * key:tickedId,value :engineConn - */ - private val engineConnMap = new ConcurrentHashMap[String, EngineConn] - - private var engineConnKillService: DefaultEngineConnKillService = _ - - val lock = Interners.newWeakInterner[String] - - override def init(): Unit = {} - - override def getEngineConn(engineConnId: String): Option[EngineConn] = Option( - engineConnMap.get(engineConnId) - ) - - override def getEngineConns: util.List[EngineConn] = - new util.ArrayList[EngineConn](engineConnMap.values()) - - override def addEngineConn(engineConn: EngineConn): Unit = { - logger.info(s"add engineConn ${engineConn.getServiceInstance} to engineConnMap") - if (LinkisECMApplication.isReady) { - engineConnMap.put(engineConn.getTickedId, engineConn) - } - } - - override def killEngineConn(engineConnId: String): Unit = { - var conn = 
engineConnMap.get(engineConnId) - if (conn != null) engineConnId.intern().synchronized { - conn = engineConnMap.get(engineConnId) - if (conn != null) { - Utils.tryAndWarn { - if (NodeStatus.Failed == conn.getStatus && StringUtils.isNotBlank(conn.getPid)) { - killECByEngineConnKillService(conn) - } else { - getEngineConnKillService().killYarnAppIdOfOneEc(conn) - } - conn.close() - } - engineConnMap.remove(engineConnId) - logger.info(s"engineconn ${conn.getServiceInstance} was closed.") - } - } - } - - override def getUsedResources: Resource = engineConnMap - .values() - .asScala - .map(_.getResource.getMinResource) - .fold(Resource.initResource(ResourceType.Default))(_ + _) - - override def submit(runner: EngineConnLaunchRunner): Option[EngineConn] = { - None - } - - def updateYarnAppId(event: YarnAppIdCallbackEvent): Unit = { - updateYarnEngineConn( - x => x.setApplicationId(event.protocol.applicationId), - event.protocol.nodeId - ) - } - - def updateYarnEngineConn(implicit - updateFunction: YarnEngineConn => Unit, - nodeId: String - ): Unit = { - lock.intern(nodeId) synchronized { - engineConnMap.get(nodeId) match { - case e: YarnEngineConn => updateFunction(e) - case e: EngineConn => - engineConnMap.put(nodeId, ECMEngineConverter.engineConn2YarnEngineConn(e)) - } - } - } - - def updateEngineConn(updateFunction: EngineConn => Unit, nodeId: String): Unit = { - lock.intern(nodeId) synchronized { - engineConnMap.get(nodeId) match { - case e: EngineConn => updateFunction(e) - case _ => - } - } - } - - def updateYarnInfo(event: YarnInfoCallbackEvent): Unit = { - updateYarnEngineConn(x => x.setApplicationURL(event.protocol.uri), event.protocol.nodeId) - } - - def updatePid(event: EngineConnPidCallbackEvent): Unit = { - updateEngineConn( - x => { - x.setPid(event.protocol.pid) - x.setServiceInstance(event.protocol.serviceInstance) - }, - event.protocol.ticketId - ) - } - - def updateEngineConnStatus(tickedId: String, updateStatus: NodeStatus): Unit = { - 
updateEngineConn(x => x.setStatus(updateStatus), tickedId) - if (NodeStatus.isCompleted(updateStatus)) { - logger.info(s" from engineConnMap to remove engineconn ticketId ${tickedId}") - killEngineConn(tickedId) - } - } - - override def onEvent(event: ECMEvent): Unit = { - logger.info(s"Deal event $event") - event match { - case event: ECMClosedEvent => shutdownEngineConns(event) - case event: YarnAppIdCallbackEvent => updateYarnAppId(event) - case event: YarnInfoCallbackEvent => updateYarnInfo(event) - case event: EngineConnPidCallbackEvent => updatePid(event) - case EngineConnAddEvent(engineConn) => addEngineConn(engineConn) - case EngineConnStatusChangeEvent(tickedId, updateStatus) => - updateEngineConnStatus(tickedId, updateStatus) - case _ => - } - } - - private def getEngineConnKillService(): DefaultEngineConnKillService = { - if (engineConnKillService == null) { - val applicationContext = DataWorkCloudApplication.getApplicationContext - engineConnKillService = applicationContext.getBean(classOf[DefaultEngineConnKillService]) - } - engineConnKillService - } - - private def shutdownEngineConns(event: ECMClosedEvent): Unit = { - logger.info("start to kill all engines belonging the ecm") - engineConnMap - .values() - .asScala - .foreach(engineconn => { - killECByEngineConnKillService(engineconn) - }) - logger.info("Done! 
success to kill all engines belonging the ecm") - } - - private def killECByEngineConnKillService(engineconn: EngineConn): Unit = { - logger.info(s"start to kill ec by engineConnKillService ${engineconn.getServiceInstance}") - val engineStopRequest = new EngineStopRequest() - engineStopRequest.setServiceInstance(engineconn.getServiceInstance) - getEngineConnKillService().dealEngineConnStop(engineStopRequest) - } - -} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnPidCallbackService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnPidCallbackService.scala deleted file mode 100644 index 6fb2d4700d..0000000000 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnPidCallbackService.scala +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.ecm.server.service.impl - -import org.apache.linkis.common.utils.Logging -import org.apache.linkis.ecm.server.LinkisECMApplication -import org.apache.linkis.ecm.server.listener.EngineConnPidCallbackEvent -import org.apache.linkis.ecm.server.service.EngineConnPidCallbackService -import org.apache.linkis.governance.common.protocol.task.ResponseEngineConnPid -import org.apache.linkis.rpc.message.annotation.Receiver - -class DefaultEngineConnPidCallbackService extends EngineConnPidCallbackService with Logging { - - @Receiver - override def dealPid(protocol: ResponseEngineConnPid): Unit = { - // 1.设置pid - // 2.设置serviceInstance - // 3.状态为running - LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll( - EngineConnPidCallbackEvent(protocol) - ) - } - -} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnStatusCallbackService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnStatusCallbackService.scala deleted file mode 100644 index af627afece..0000000000 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultEngineConnStatusCallbackService.scala +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.ecm.server.service.impl - -import org.apache.linkis.common.utils.Logging -import org.apache.linkis.ecm.server.LinkisECMApplication -import org.apache.linkis.ecm.server.conf.ECMConfiguration.MANAGER_SERVICE_NAME -import org.apache.linkis.ecm.server.listener.EngineConnStatusChangeEvent -import org.apache.linkis.ecm.server.service.EngineConnStatusCallbackService -import org.apache.linkis.manager.common.entity.enumeration.NodeStatus -import org.apache.linkis.manager.common.entity.enumeration.NodeStatus.{Failed, Running} -import org.apache.linkis.manager.common.protocol.engine.{ - EngineConnStatusCallback, - EngineConnStatusCallbackToAM -} -import org.apache.linkis.rpc.Sender -import org.apache.linkis.rpc.message.annotation.Receiver - -import org.springframework.stereotype.Service - -@Service -class DefaultEngineConnStatusCallbackService extends EngineConnStatusCallbackService with Logging { - - @Receiver - override def dealEngineConnStatusCallback(protocol: EngineConnStatusCallback): Unit = { - logger.info(s"Start to deal EngineConnStatusCallback $protocol") - - if (NodeStatus.isAvailable(protocol.status)) { - - LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll( - EngineConnStatusChangeEvent(protocol.ticketId, Running) - ) - } else { - - Sender - .getSender(MANAGER_SERVICE_NAME) - .send( - EngineConnStatusCallbackToAM( - protocol.serviceInstance, - protocol.status, - protocol.initErrorMsg - ) - ) - LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll( - 
EngineConnStatusChangeEvent(protocol.ticketId, Failed) - ) - } - - logger.info(s"Finished to deal EngineConnStatusCallback $protocol") - } - -} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultYarnCallbackService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultYarnCallbackService.scala deleted file mode 100644 index 0bb2e1366f..0000000000 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/DefaultYarnCallbackService.scala +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.ecm.server.service.impl - -import org.apache.linkis.ecm.server.LinkisECMApplication -import org.apache.linkis.ecm.server.listener.{YarnAppIdCallbackEvent, YarnInfoCallbackEvent} -import org.apache.linkis.ecm.server.service.YarnCallbackService -import org.apache.linkis.protocol.callback.{YarnAPPIdCallbackProtocol, YarnInfoCallbackProtocol} -import org.apache.linkis.rpc.message.annotation.Receiver - -class DefaultYarnCallbackService extends YarnCallbackService { - - @Receiver - override def dealApplicationId(protocol: YarnAPPIdCallbackProtocol): Unit = { - LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll( - YarnAppIdCallbackEvent(protocol) - ) - } - - @Receiver - override def dealApplicationURI(protocol: YarnInfoCallbackProtocol): Unit = { - LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll(YarnInfoCallbackEvent(protocol)) - } - -} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ECMListenerService.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ECMListenerService.scala new file mode 100644 index 0000000000..764a704887 --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ECMListenerService.scala @@ -0,0 +1,54 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.ecm.server.service.impl + +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.ecm.core.listener.{ECMEvent, ECMEventListener} +import org.apache.linkis.ecm.server.listener.EngineConnStopEvent +import org.apache.linkis.ecm.server.service.EngineConnKillService +import org.apache.linkis.manager.common.entity.enumeration.NodeStatus + +class ECMListenerService extends ECMEventListener with Logging { + + private var engineConnKillService: EngineConnKillService = _ + + override def onEvent(event: ECMEvent): Unit = event match { + case EngineConnStopEvent(engineConn, engineStopRequest) => + if (NodeStatus.Failed == engineConn.getStatus) { + logger.info("deal stopEvent to kill ec {}", engineStopRequest) + engineConnKillService.dealEngineConnStop(engineStopRequest) + } else { + if (engineConnKillService.isInstanceOf[DefaultEngineConnKillService]) { + logger.info("deal stopEvent to kill yarn app {}", engineStopRequest) + engineConnKillService + .asInstanceOf[DefaultEngineConnKillService] + .killYarnAppIdOfOneEc(engineStopRequest) + } + } + case _ => + } + + def getEngineConnKillService(): EngineConnKillService = { + engineConnKillService + } + + def setEngineConnKillService(engineConnKillService: EngineConnKillService): Unit = { + this.engineConnKillService = engineConnKillService + } + +} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ProcessEngineConnLaunchService.scala 
b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ProcessEngineConnLaunchService.scala index ad58ba723f..dbb65b0059 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ProcessEngineConnLaunchService.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/impl/ProcessEngineConnLaunchService.scala @@ -19,31 +19,27 @@ package org.apache.linkis.ecm.server.service.impl import org.apache.linkis.common.conf.Configuration import org.apache.linkis.common.utils.Utils -import org.apache.linkis.ecm.core.conf.ECMErrorCode import org.apache.linkis.ecm.core.engineconn.EngineConn import org.apache.linkis.ecm.core.launch.ProcessEngineConnLaunch -import org.apache.linkis.ecm.errorcode.EngineconnServerErrorCodeSummary._ import org.apache.linkis.ecm.server.LinkisECMApplication import org.apache.linkis.ecm.server.conf.ECMConfiguration import org.apache.linkis.ecm.server.conf.ECMConfiguration.MANAGER_SERVICE_NAME -import org.apache.linkis.ecm.server.exception.ECMErrorException -import org.apache.linkis.ecm.server.listener.EngineConnStatusChangeEvent +import org.apache.linkis.ecm.server.listener.EngineConnStopEvent import org.apache.linkis.ecm.server.service.LocalDirsHandleService +import org.apache.linkis.governance.common.utils.{JobUtils, LoggerUtils} +import org.apache.linkis.manager.common.constant.AMConstant import org.apache.linkis.manager.common.entity.enumeration.NodeStatus -import org.apache.linkis.manager.common.entity.enumeration.NodeStatus._ -import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallbackToAM +import org.apache.linkis.manager.common.protocol.engine.{ + EngineConnStatusCallbackToAM, + EngineStopRequest +} import 
org.apache.linkis.manager.engineplugin.common.launch.entity.EngineConnLaunchRequest import org.apache.linkis.manager.label.utils.LabelUtil import org.apache.linkis.rpc.Sender import org.apache.commons.io.IOUtils -import org.apache.commons.lang3.StringUtils -import org.apache.commons.lang3.exception.ExceptionUtils - -import java.util.concurrent.TimeUnit -import scala.concurrent.{Future, TimeoutException} -import scala.concurrent.duration.Duration +import scala.concurrent.Future abstract class ProcessEngineConnLaunchService extends AbstractEngineConnLaunchService { @@ -52,110 +48,78 @@ abstract class ProcessEngineConnLaunchService extends AbstractEngineConnLaunchSe def setLocalDirsHandleService(localDirsHandleService: LocalDirsHandleService): Unit = this.localDirsHandleService = localDirsHandleService - override def waitEngineConnStart( + override def startEngineConnMonitorStart( request: EngineConnLaunchRequest, - conn: EngineConn, - duration: Long + conn: EngineConn ): Unit = { conn.getEngineConnLaunchRunner.getEngineConnLaunch match { case launch: ProcessEngineConnLaunch => - Utils.tryCatch { - // Set the pid of the shell script before the pid callBack returns - launch.getPid().foreach(conn.setPid) - processMonitorThread(conn, launch, duration) - } { case e: Throwable => - val logPath = Utils.tryCatch(conn.getEngineConnManagerEnv.engineConnLogDirs) { t => - localDirsHandleService.getEngineConnLogDir( - request.user, - request.ticketId, - LabelUtil.getEngineType(request.labels) - ) - } - val canRetry = e match { - case ecmError: ECMErrorException => - if (ECMErrorCode.EC_START_TIME_OUT == ecmError.getErrCode) { - true - } else if (StringUtils.isBlank(ecmError.getDesc)) { - logger.info("exception desc is null, can be retry") - true - } else { - false - } - case _ => false - } + launch.getPid().foreach(conn.setPid) + processMonitorThread(conn, launch) + case _ => + } + } + + private def processMonitorThread( + engineConn: EngineConn, + launch: ProcessEngineConnLaunch 
+ ): Unit = { + Future { + val tickedId = engineConn.getTickedId + val errorMsg = new StringBuilder + val taskId = + JobUtils.getJobIdFromStringMap(launch.getEngineConnLaunchRequest.creationDesc.properties) + LoggerUtils.setJobIdMDC(taskId) + Utils.tryAndWarnMsg { + val iterator = + IOUtils.lineIterator(launch.getProcessInputStream, Configuration.BDP_ENCODING.getValue) + var count = 0 + val maxLen = ECMConfiguration.ENGINE_START_ERROR_MSG_MAX_LEN.getValue + while (launch.isAlive && iterator.hasNext && count < maxLen) { + val line = iterator.next() + errorMsg.append(line).append("\n") + count += 1 + } + val exitCode = launch.processWaitFor + val engineType = LabelUtil.getEngineType(launch.getEngineConnLaunchRequest.labels) + val logPath = Utils.tryCatch(engineConn.getEngineConnManagerEnv.engineConnLogDirs) { t => + localDirsHandleService.getEngineConnLogDir( + launch.getEngineConnLaunchRequest.user, + tickedId, + engineType + ) + } + if (exitCode != 0) { + val canRetry = if (errorMsg.isEmpty) true else false logger.warn( - s"Failed to init ${conn.getServiceInstance}, status shutting down, canRetry $canRetry, logPath $logPath", - e + s"Failed to start ec ${engineConn.getServiceInstance}, status shutting down exit code ${exitCode}, canRetry ${canRetry}, logPath ${logPath}" ) Sender .getSender(MANAGER_SERVICE_NAME) .send( EngineConnStatusCallbackToAM( - conn.getServiceInstance, + engineConn.getServiceInstance, NodeStatus.ShuttingDown, - "Failed to start EngineConn, reason: " + ExceptionUtils.getRootCauseMessage( - e - ) + s"\n You can go to this path($logPath) to find the reason or ask the administrator for help", + "Failed to start EngineConn, reason: " + errorMsg + s"\n You can go to this path($logPath) to find the reason or ask the administrator for help", canRetry ) ) - throw e + engineConn.setStatus(NodeStatus.ShuttingDown) + } else { + engineConn.setStatus(NodeStatus.Success) } - case _ => - } - } - - private def processMonitorThread( - engineConn: EngineConn, - 
launch: ProcessEngineConnLaunch, - timeout: Long - ): Unit = { - val isCompleted: EngineConn => Boolean = engineConn => - engineConn.getStatus == Success || engineConn.getStatus == Failed - val tickedId = engineConn.getTickedId - val errorMsg = new StringBuilder - Future { - val iterator = - IOUtils.lineIterator(launch.getProcessInputStream, Configuration.BDP_ENCODING.getValue) - var count = 0 - val maxLen = ECMConfiguration.ENGINE_START_ERROR_MSG_MAX_LEN.getValue - while (!isCompleted(engineConn) && iterator.hasNext && count < maxLen) { - val line = iterator.next() - errorMsg.append(line).append("\n") - count += 1 - } - val exitCode = Option(launch.processWaitFor) - if (exitCode.exists(_ != 0)) { - logger.info(s"engine ${tickedId} process exit ") - LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll( - EngineConnStatusChangeEvent(tickedId, ShuttingDown) - ) - } else { - LinkisECMApplication.getContext.getECMSyncListenerBus.postToAll( - EngineConnStatusChangeEvent(tickedId, Success) - ) - } - } - Utils.tryThrow( - Utils - .waitUntil(() => engineConn.getStatus != Starting, Duration(timeout, TimeUnit.MILLISECONDS)) - ) { - case e: TimeoutException => - throw new ECMErrorException( - EC_START_TIME_OUT.getErrorCode, - EC_START_TIME_OUT.getErrorDesc + s" $engineConn ." - ) - case e: InterruptedException => // 比如被ms cancel - throw new ECMErrorException( - EC_INTERRUPT_TIME_OUT.getErrorCode, - EC_INTERRUPT_TIME_OUT.getErrorDesc + s" $engineConn ." 
+ val engineStopRequest = new EngineStopRequest + engineStopRequest.setEngineType(engineType) + engineStopRequest.setUser(launch.getEngineConnLaunchRequest.user) + engineStopRequest.setIdentifier(engineConn.getPid) + engineStopRequest.setIdentifierType(AMConstant.PROCESS_MARK) + engineStopRequest.setLogDirSuffix(logPath) + engineStopRequest.setServiceInstance(engineConn.getServiceInstance) + LinkisECMApplication.getContext.getECMAsyncListenerBus.post( + EngineConnStopEvent(engineConn, engineStopRequest) ) - case t: Throwable => - logger.error(s"unexpected error, now shutdown it.") - throw t - } - if (engineConn.getStatus == ShuttingDown) { - throw new ECMErrorException(EC_START_FAILED.getErrorCode, errorMsg.toString()) + } { s"EngineConns: ${engineConn.getServiceInstance} monitor Failed" } + LoggerUtils.removeJobIdMDC() } } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/spring/ECMSpringConfiguration.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/spring/ECMSpringConfiguration.scala index 4a684bbec1..ec65cd885f 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/spring/ECMSpringConfiguration.scala +++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/spring/ECMSpringConfiguration.scala @@ -19,7 +19,7 @@ package org.apache.linkis.ecm.server.spring import org.apache.linkis.ecm.core.listener.ECMEventListener import org.apache.linkis.ecm.server.context.{DefaultECMContext, ECMContext} -import org.apache.linkis.ecm.server.service._ +import org.apache.linkis.ecm.server.service.{EngineConnKillService, _} import org.apache.linkis.ecm.server.service.impl._ import 
org.springframework.beans.factory.annotation.Autowired @@ -39,16 +39,9 @@ class ECMSpringConfiguration { new DefaultECMContext } - @Bean - @ConditionalOnMissingBean - def getDefaultYarnCallbackService: YarnCallbackService = { - new DefaultYarnCallbackService - } - @Bean @ConditionalOnMissingBean def getBmlResourceLocalizationService( - context: ECMContext, localDirsHandleService: LocalDirsHandleService ): ResourceLocalizationService = { val service: BmlResourceLocalizationService = new BmlResourceLocalizationService @@ -57,32 +50,12 @@ class ECMSpringConfiguration { service } - @Bean - @ConditionalOnMissingBean - def getDefaultLogCallbackService: LogCallbackService = { - null - } - @Bean @ConditionalOnMissingBean def getDefaultlocalDirsHandleService: LocalDirsHandleService = { new DefaultLocalDirsHandleService } - @Bean - @ConditionalOnMissingBean - def getDefaultEngineConnPidCallbackService: EngineConnPidCallbackService = { - new DefaultEngineConnPidCallbackService - } - - @Bean - @ConditionalOnMissingBean - def getDefaultEngineConnListService(context: ECMContext): EngineConnListService = { - implicit val service: DefaultEngineConnListService = new DefaultEngineConnListService - registerSyncListener(context) - service - } - @Bean @ConditionalOnMissingBean def getLinuxProcessEngineConnLaunchService( @@ -98,38 +71,44 @@ class ECMSpringConfiguration { @Bean @ConditionalOnMissingBean def getDefaultECMRegisterService(context: ECMContext): ECMRegisterService = { - implicit val service: DefaultECMRegisterService = new DefaultECMRegisterService - registerSyncListener(context) + val service: DefaultECMRegisterService = new DefaultECMRegisterService + registerSyncListener(context, service) service } @Bean @ConditionalOnMissingBean def getDefaultECMHealthService(context: ECMContext): ECMHealthService = { - implicit val service: DefaultECMHealthService = new DefaultECMHealthService - registerSyncListener(context) + val service: DefaultECMHealthService = new 
DefaultECMHealthService + registerSyncListener(context, service) service } @Bean @ConditionalOnMissingBean def getDefaultEngineConnKillService( - engineConnListService: EngineConnListService ): EngineConnKillService = { val service = new DefaultEngineConnKillService - service.setEngineConnListService(engineConnListService) service } - private def registerSyncListener( + @Bean + @ConditionalOnMissingBean + def getECMListenerService( + engineConnKillService: EngineConnKillService, context: ECMContext - )(implicit listener: ECMEventListener): Unit = { + ): ECMListenerService = { + val service: ECMListenerService = new ECMListenerService + service.setEngineConnKillService(engineConnKillService) + registerASyncListener(context, service) + service + } + + private def registerSyncListener(context: ECMContext, listener: ECMEventListener): Unit = { context.getECMSyncListenerBus.addListener(listener) } - private def registerASyncListener( - context: ECMContext - )(implicit listener: ECMEventListener): Unit = { + private def registerASyncListener(context: ECMContext, listener: ECMEventListener): Unit = { context.getECMAsyncListenerBus.addListener(listener) } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/hook/OnceEngineConnHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/hook/OnceEngineConnHook.scala index cd4029bae6..2586576bb7 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/hook/OnceEngineConnHook.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/org/apache/linkis/engineconn/once/executor/hook/OnceEngineConnHook.scala @@ -28,11 +28,6 @@ 
import org.apache.linkis.manager.common.entity.enumeration.NodeStatus */ class OnceEngineConnHook extends CallbackEngineConnHook { - override protected def getNodeStatusOfStartSuccess( - engineCreationContext: EngineCreationContext, - engineConn: EngineConn - ): NodeStatus = NodeStatus.Unlock - override def afterEngineServerStartSuccess( engineCreationContext: EngineCreationContext, engineConn: EngineConn diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/java/org/apache/linkis/engineconn/computation/concurrent/monitor/TimingMonitorService.java b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/java/org/apache/linkis/engineconn/computation/concurrent/monitor/TimingMonitorService.java index de6bb440dd..979b19ed46 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/java/org/apache/linkis/engineconn/computation/concurrent/monitor/TimingMonitorService.java +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/java/org/apache/linkis/engineconn/computation/concurrent/monitor/TimingMonitorService.java @@ -55,7 +55,7 @@ public class TimingMonitorService implements InitializingBean, Runnable { @Override public void afterPropertiesSet() throws Exception { - if (AccessibleExecutorConfiguration.ENGINECONN_SUPPORT_PARALLELISM()) { + if ((Boolean) (AccessibleExecutorConfiguration.ENGINECONN_SUPPORT_PARALLELISM().getValue())) { Utils.defaultScheduler() .scheduleAtFixedRate( this, 3 * 60 * 1000, MONITOR_INTERVAL.getValue().toLong(), TimeUnit.MILLISECONDS); diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncEngineConnJob.scala 
b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncEngineConnJob.scala index 8876a50c37..6f73f67fe6 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncEngineConnJob.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncEngineConnJob.scala @@ -20,6 +20,7 @@ package org.apache.linkis.engineconn.computation.executor.async import org.apache.linkis.common.utils.Utils import org.apache.linkis.engineconn.computation.executor.entity.EngineConnTask import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext +import org.apache.linkis.governance.common.utils.{JobUtils, LoggerUtils} import org.apache.linkis.scheduler.executer.{ CompletedExecuteResponse, ErrorExecuteResponse, @@ -27,6 +28,7 @@ import org.apache.linkis.scheduler.executer.{ SuccessExecuteResponse } import org.apache.linkis.scheduler.queue.{Job, JobInfo} +import org.apache.linkis.scheduler.queue.SchedulerEventState.SchedulerEventState class AsyncEngineConnJob(task: EngineConnTask, engineExecutionContext: EngineExecutionContext) extends Job { @@ -47,6 +49,14 @@ class AsyncEngineConnJob(task: EngineConnTask, engineExecutionContext: EngineExe override def close(): Unit = {} + override def transition(state: SchedulerEventState): Unit = Utils.tryFinally { + val jobId = JobUtils.getJobIdFromMap(task.getProperties) + LoggerUtils.setJobIdMDC(jobId) + super.transition(state) + } { + LoggerUtils.removeJobIdMDC() + } + override def transitionCompleted(executeCompleted: CompletedExecuteResponse): Unit = { var executeCompletedNew: CompletedExecuteResponse = executeCompleted executeCompleted match { diff --git 
a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncExecuteExecutor.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncExecuteExecutor.scala index a27d3f029a..46332b93fd 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncExecuteExecutor.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/async/AsyncExecuteExecutor.scala @@ -17,8 +17,10 @@ package org.apache.linkis.engineconn.computation.executor.async +import org.apache.linkis.common.utils.Utils import org.apache.linkis.engineconn.common.exception.EngineConnException import org.apache.linkis.engineconn.computation.executor.utlis.ComputationErrorCode +import org.apache.linkis.governance.common.utils.{JobUtils, LoggerUtils} import org.apache.linkis.scheduler.executer._ import org.apache.linkis.scheduler.executer.ExecutorState.ExecutorState @@ -31,10 +33,16 @@ class AsyncExecuteExecutor(executor: AsyncConcurrentComputationExecutor) extends override def execute(executeRequest: ExecuteRequest): ExecuteResponse = { executeRequest match { case asyncExecuteRequest: AsyncExecuteRequest => - executor.asyncExecuteTask( - asyncExecuteRequest.task, - asyncExecuteRequest.engineExecutionContext - ) + Utils.tryFinally { + val jobId = JobUtils.getJobIdFromMap(asyncExecuteRequest.task.getProperties) + LoggerUtils.setJobIdMDC(jobId) + executor.asyncExecuteTask( + asyncExecuteRequest.task, + asyncExecuteRequest.engineExecutionContext + ) + } { + LoggerUtils.removeJobIdMDC() + } case _ => throw EngineConnException( ComputationErrorCode.ASYNC_EXECUTOR_ERROR_CODE, diff --git 
a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/bml/BmlEnginePreExecuteHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/bml/BmlEnginePreExecuteHook.scala index 3959eb942b..98f04daaa2 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/bml/BmlEnginePreExecuteHook.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/bml/BmlEnginePreExecuteHook.scala @@ -20,6 +20,7 @@ package org.apache.linkis.engineconn.computation.executor.bml import org.apache.linkis.bml.client.{BmlClient, BmlClientFactory} import org.apache.linkis.common.exception.ErrorException import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.common.conf.EngineConnConf import org.apache.linkis.engineconn.common.creation.EngineCreationContext import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext import org.apache.linkis.engineconn.computation.executor.hook.ComputationExecutorHook @@ -62,7 +63,7 @@ class BmlEnginePreExecuteHook extends ComputationExecutorHook with Logging { ): String = { val props = engineExecutionContext.getProperties if (null != props && props.containsKey(GovernanceConstant.TASK_RESOURCES_STR)) { - val workDir = ComputationEngineUtils.getCurrentWorkDir +// val workDir = ComputationEngineUtils.getCurrentWorkDir val jobId = engineExecutionContext.getJobId props.get(GovernanceConstant.TASK_RESOURCES_STR) match { case resources: util.List[Object] => @@ -71,9 +72,9 @@ class BmlEnginePreExecuteHook extends ComputationExecutorHook with Logging { val fileName = 
resource.get(GovernanceConstant.TASK_RESOURCE_FILE_NAME_STR).toString val resourceId = resource.get(GovernanceConstant.TASK_RESOURCE_ID_STR).toString val version = resource.get(GovernanceConstant.TASK_RESOURCE_VERSION_STR).toString - val fullPath = - if (workDir.endsWith(seperator)) pathType + workDir + fileName - else pathType + workDir + seperator + fileName + val fullPath = fileName +// if (workDir.endsWith(seperator)) pathType + workDir + fileName +// else pathType + workDir + seperator + fileName val response = Utils.tryCatch { bmlClient.downloadShareResource(processUser, resourceId, version, fullPath, true) } { diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSResourceParser.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSResourceParser.scala index f59adaadef..fe98e3328e 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSResourceParser.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/cs/CSResourceParser.scala @@ -17,7 +17,10 @@ package org.apache.linkis.engineconn.computation.executor.cs +import org.apache.linkis.common.utils.Logging import org.apache.linkis.cs.client.service.CSResourceService +import org.apache.linkis.engineconn.common.conf.EngineConnConf +import org.apache.linkis.governance.common.utils.GovernanceConstant import org.apache.commons.lang3.StringUtils @@ -27,7 +30,7 @@ import java.util.regex.Pattern import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer -class CSResourceParser { +class CSResourceParser extends Logging { private val pb = Pattern.compile("cs://[^\\s\"]+[$\\s]{0,1}", 
Pattern.CASE_INSENSITIVE) @@ -47,7 +50,6 @@ class CSResourceParser { nodeNameStr: String ): String = { - // TODO getBMLResource val bmlResourceList = CSResourceService.getInstance().getUpstreamBMLResource(contextIDValueStr, nodeNameStr) @@ -56,23 +58,25 @@ class CSResourceParser { val preFixNames = new ArrayBuffer[String]() val parsedNames = new ArrayBuffer[String]() + val prefixName = System.currentTimeMillis().toString + "_" preFixResourceNames.foreach { preFixResourceName => val resourceName = preFixResourceName.replace(PREFIX, "").trim val bmlResourceOption = bmlResourceList.asScala.find(_.getDownloadedFileName.equals(resourceName)) if (bmlResourceOption.isDefined) { + val replacementName = EngineConnConf.getEngineTmpDir + prefixName + resourceName val bmlResource = bmlResourceOption.get val map = new util.HashMap[String, Object]() - map.put("resourceId", bmlResource.getResourceId) - map.put("version", bmlResource.getVersion) - map.put("fileName", resourceName) + map.put(GovernanceConstant.TASK_RESOURCE_ID_STR, bmlResource.getResourceId) + map.put(GovernanceConstant.TASK_RESOURCE_VERSION_STR, bmlResource.getVersion) + map.put(GovernanceConstant.TASK_RESOURCE_FILE_NAME_STR, replacementName) parsedResources.add(map) preFixNames.append(preFixResourceName) - parsedNames.append(resourceName) + parsedNames.append(replacementName) + logger.warn(s"Replace cs file from {$preFixResourceName} to {$replacementName}") } - } - props.put("resources", parsedResources) + props.put(GovernanceConstant.TASK_RESOURCES_STR, parsedResources) StringUtils.replaceEach(code, preFixNames.toArray, parsedNames.toArray) } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala index 
780db4215a..96e5a7d480 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala @@ -21,7 +21,11 @@ import org.apache.linkis.DataWorkCloudApplication import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.engineconn.acessible.executor.entity.AccessibleExecutor -import org.apache.linkis.engineconn.acessible.executor.listener.event.TaskStatusChangedEvent +import org.apache.linkis.engineconn.acessible.executor.listener.event.{ + TaskLogUpdateEvent, + TaskResponseErrorEvent, + TaskStatusChangedEvent +} import org.apache.linkis.engineconn.common.conf.{EngineConnConf, EngineConnConstant} import org.apache.linkis.engineconn.computation.executor.conf.ComputationExecutorConf import org.apache.linkis.engineconn.computation.executor.entity.EngineConnTask @@ -35,6 +39,7 @@ import org.apache.linkis.engineconn.executor.listener.ExecutorListenerBusContext import org.apache.linkis.governance.common.entity.ExecutionNodeStatus import org.apache.linkis.governance.common.paser.CodeParser import org.apache.linkis.governance.common.protocol.task.{EngineConcurrentInfo, RequestTask} +import org.apache.linkis.governance.common.utils.{JobUtils, LoggerUtils} import org.apache.linkis.manager.common.entity.enumeration.NodeStatus import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel import org.apache.linkis.protocol.engine.JobProgressInfo @@ -131,6 +136,12 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) override def close(): Unit = { if (null != lastTask) CLOSE_LOCKER.synchronized { + listenerBusContext.getEngineConnSyncListenerBus.postToAll( + TaskLogUpdateEvent( + 
lastTask.getTaskId, + LogUtils.generateERROR("EC exits unexpectedly and actively kills the task") + ) + ) killTask(lastTask.getTaskId) } else { @@ -242,12 +253,15 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) response = response match { case _: OutputExecuteResponse => succeedTasks.increase() - transformTaskStatus(engineConnTask, ExecutionNodeStatus.Succeed) SuccessExecuteResponse() case s: SuccessExecuteResponse => succeedTasks.increase() - transformTaskStatus(engineConnTask, ExecutionNodeStatus.Succeed) s + case incompleteExecuteResponse: IncompleteExecuteResponse => + ErrorExecuteResponse( + s"The task cannot be an incomplete response ${incompleteExecuteResponse.message}", + null + ) case _ => response } response @@ -257,20 +271,34 @@ abstract class ComputationExecutor(val outputPrintLimit: Int = 1000) } } - def execute(engineConnTask: EngineConnTask): ExecuteResponse = { + def execute(engineConnTask: EngineConnTask): ExecuteResponse = Utils.tryFinally { + val jobId = JobUtils.getJobIdFromMap(engineConnTask.getProperties) + LoggerUtils.setJobIdMDC(jobId) logger.info(s"start to execute task ${engineConnTask.getTaskId}") updateLastActivityTime() beforeExecute(engineConnTask) taskCache.put(engineConnTask.getTaskId, engineConnTask) lastTask = engineConnTask val response = ensureOp { - toExecuteTask(engineConnTask) + val executeResponse = toExecuteTask(engineConnTask) + executeResponse match { + case successExecuteResponse: SuccessExecuteResponse => + transformTaskStatus(engineConnTask, ExecutionNodeStatus.Succeed) + case errorExecuteResponse: ErrorExecuteResponse => + listenerBusContext.getEngineConnSyncListenerBus.postToAll( + TaskResponseErrorEvent(engineConnTask.getTaskId, errorExecuteResponse.message) + ) + transformTaskStatus(engineConnTask, ExecutionNodeStatus.Failed) + } + executeResponse } Utils.tryAndWarn(afterExecute(engineConnTask, response)) logger.info(s"Finished to execute task ${engineConnTask.getTaskId}") // lastTask = null 
response + } { + LoggerUtils.removeJobIdMDC() } def setCodeParser(codeParser: CodeParser): Unit = this.codeParser = Some(codeParser) diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala index 08124f2225..b97bb1cd9e 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala @@ -188,7 +188,6 @@ class EngineExecutionContext(executor: ComputationExecutor, executorUser: String logger.info(log) } else { val listenerBus = getEngineSyncListenerBus - // jobId.foreach(jId => listenerBus.post(TaskLogUpdateEvent(jId, log))) getJobId.foreach(jId => listenerBus.postToAll(TaskLogUpdateEvent(jId, log))) } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/ComputationEngineConnHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/ComputationEngineConnHook.scala index d22bf3f800..e5ccd2bfbe 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/ComputationEngineConnHook.scala +++ 
b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/hook/ComputationEngineConnHook.scala @@ -25,11 +25,6 @@ import org.apache.linkis.manager.common.entity.enumeration.NodeStatus class ComputationEngineConnHook extends CallbackEngineConnHook { - override protected def getNodeStatusOfStartSuccess( - engineCreationContext: EngineCreationContext, - engineConn: EngineConn - ): NodeStatus = NodeStatus.Unlock - override def afterEngineServerStartSuccess( engineCreationContext: EngineCreationContext, engineConn: EngineConn diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/metrics/ComputationEngineConnMetrics.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/metrics/ComputationEngineConnMetrics.scala index f96896f557..4446bdc672 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/metrics/ComputationEngineConnMetrics.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/metrics/ComputationEngineConnMetrics.scala @@ -91,4 +91,12 @@ object ComputationEngineConnMetrics { getTotalBusyTimeMills(nodeStatus) + getTotalIdleTimeMills(nodeStatus) def getUnlockToShutdownDurationMills(): Long = unlockToShutdownDurationMills.get() + + def getLastUnlockTimestamp(nodeStatus: NodeStatus): Long = { + nodeStatus match { + case NodeStatus.Unlock => lastUnlockTimeMills + case _ => 0 + } + } + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/DefaultNodeHeartbeatMsgManager.scala 
b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/DefaultNodeHeartbeatMsgManager.scala index eccf54bfad..e5d74282de 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/DefaultNodeHeartbeatMsgManager.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/DefaultNodeHeartbeatMsgManager.scala @@ -21,10 +21,13 @@ import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.engineconn.acessible.executor.info.NodeHeartbeatMsgManager import org.apache.linkis.engineconn.computation.executor.metrics.ComputationEngineConnMetrics import org.apache.linkis.engineconn.core.EngineConnObject -import org.apache.linkis.engineconn.executor.entity.{Executor, SensibleExecutor} +import org.apache.linkis.engineconn.executor.entity.{Executor, SensibleExecutor, YarnExecutor} import org.apache.linkis.governance.common.constant.ec.ECConstants +import org.apache.linkis.manager.common.entity.enumeration.NodeStatus import org.apache.linkis.server.BDPJettyServerHelper +import org.apache.commons.lang3.StringUtils + import org.springframework.stereotype.Component import java.util @@ -63,6 +66,10 @@ class DefaultNodeHeartbeatMsgManager extends NodeHeartbeatMsgManager with Loggin ECConstants.EC_TOTAL_LOCK_TIME_MILLS_KEY, ComputationEngineConnMetrics.getTotalLockTimeMills(status).asInstanceOf[Object] ) + msgMap.put( + ECConstants.EC_LAST_UNLOCK_TIMESTAMP, + ComputationEngineConnMetrics.getLastUnlockTimestamp(status).asInstanceOf[Object] + ) case _ => } val engineParams = EngineConnObject.getEngineCreationContext.getOptions @@ -72,6 +79,22 @@ class DefaultNodeHeartbeatMsgManager extends NodeHeartbeatMsgManager with Loggin 
engineParams.get(ECConstants.YARN_QUEUE_NAME_CONFIG_KEY).asInstanceOf[Object] ) } + executor match { + case yarnExecutor: YarnExecutor => + if (StringUtils.isNotBlank(yarnExecutor.getQueue)) { + msgMap.put(ECConstants.YARN_QUEUE_NAME_KEY, yarnExecutor.getQueue) + } + if (StringUtils.isNotBlank(yarnExecutor.getApplicationId)) { + msgMap.put(ECConstants.YARN_APPID_NAME_KEY, yarnExecutor.getApplicationId) + } + if (StringUtils.isNotBlank(yarnExecutor.getApplicationURL)) { + msgMap.put(ECConstants.YARN_APP_URL_KEY, yarnExecutor.getApplicationURL) + } + if (StringUtils.isNotBlank(yarnExecutor.getYarnMode)) { + msgMap.put(ECConstants.YARN_MODE_KEY, yarnExecutor.getYarnMode) + } + case _ => + } Utils.tryCatch(BDPJettyServerHelper.gson.toJson(msgMap)) { case e: Exception => val msgs = msgMap.asScala .map { case (k, v) => if (null == v) s"${k}->null" else s"${k}->${v.toString}" } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala index 651fc0f3dc..82a49758a5 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala @@ -58,7 +58,7 @@ import org.apache.linkis.governance.common.exception.engineconn.{ EngineConnExecutorErrorException } import org.apache.linkis.governance.common.protocol.task._ -import org.apache.linkis.governance.common.utils.JobUtils +import org.apache.linkis.governance.common.utils.{JobUtils, LoggerUtils} import 
org.apache.linkis.manager.common.entity.enumeration.NodeStatus import org.apache.linkis.manager.common.protocol.resource.{ ResponseTaskRunningInfo, @@ -170,82 +170,92 @@ class TaskExecutionServiceImpl } @Receiver - override def execute(requestTask: RequestTask, sender: Sender): ExecuteResponse = { - - // check lock - logger.info("Received a new task, task content is " + requestTask) - if (StringUtils.isBlank(requestTask.getLock)) { - logger.error(s"Invalid lock : ${requestTask.getLock} , requestTask : " + requestTask) - return ErrorExecuteResponse( - s"Invalid lock : ${requestTask.getLock}.", - new EngineConnExecutorErrorException( - EngineConnExecutorErrorCode.INVALID_PARAMS, - "Invalid lock or code(请获取到锁后再提交任务.)" + override def execute(requestTask: RequestTask, sender: Sender): ExecuteResponse = + Utils.tryFinally { + val jobId = JobUtils.getJobIdFromMap(requestTask.getProperties) + LoggerUtils.setJobIdMDC(jobId) + // check lock + logger.info("Received a new task, task content is " + requestTask) + if (StringUtils.isBlank(requestTask.getLock)) { + logger.error(s"Invalid lock : ${requestTask.getLock} , requestTask : " + requestTask) + return ErrorExecuteResponse( + s"Invalid lock : ${requestTask.getLock}.", + new EngineConnExecutorErrorException( + EngineConnExecutorErrorCode.INVALID_PARAMS, + "Invalid lock or code(请获取到锁后再提交任务.)" + ) ) - ) - } - if (!lockService.isLockExist(requestTask.getLock)) { - logger.error(s"Lock ${requestTask.getLock} not exist, cannot execute.") - return ErrorExecuteResponse( - "Lock not exixt", - new EngineConnExecutorErrorException( - EngineConnExecutorErrorCode.INVALID_LOCK, - "Lock : " + requestTask.getLock + " not exist(您的锁无效,请重新获取后再提交)." 
+ } + if (!lockService.isLockExist(requestTask.getLock)) { + logger.error(s"Lock ${requestTask.getLock} not exist, cannot execute.") + return ErrorExecuteResponse( + "Lock not exixt", + new EngineConnExecutorErrorException( + EngineConnExecutorErrorCode.INVALID_LOCK, + "Lock : " + requestTask.getLock + " not exist(您的锁无效,请重新获取后再提交)." + ) ) - ) - } + } - if (StringUtils.isBlank(requestTask.getCode)) { - return IncompleteExecuteResponse( - "Your code is incomplete, it may be that only comments are selected for execution(您的代码不完整,可能是仅仅选中了注释进行执行)" - ) - } + if (StringUtils.isBlank(requestTask.getCode)) { + return IncompleteExecuteResponse( + "Your code is incomplete, it may be that only comments are selected for execution(您的代码不完整,可能是仅仅选中了注释进行执行)" + ) + } - val taskId: Int = taskExecutedNum.incrementAndGet() - val retryAble: Boolean = { - val retry = - requestTask.getProperties.getOrDefault(ComputationEngineConstant.RETRYABLE_TYPE_NAME, null) - if (null != retry) retry.asInstanceOf[Boolean] - else false - } - val jobId = JobUtils.getJobIdFromMap(requestTask.getProperties) - if (StringUtils.isNotBlank(jobId)) { - System.getProperties.put(ComputationExecutorConf.JOB_ID_TO_ENV_KEY, jobId) - logger.info(s"Received job with id ${jobId}.") - } - val task = new CommonEngineConnTask(String.valueOf(taskId), retryAble) - task.setCode(requestTask.getCode) - task.setProperties(requestTask.getProperties) - task.data(ComputationEngineConstant.LOCK_TYPE_NAME, requestTask.getLock) - task.setStatus(ExecutionNodeStatus.Scheduled) - val labels = requestTask.getLabels.asScala.toArray - task.setLabels(labels) - val entranceServerInstance = RPCUtils.getServiceInstanceFromSender(sender) - task.setCallbackServiceInstance(entranceServerInstance) - logger.info(s"task $taskId submit executor to execute") - val runnable = new Runnable { - override def run(): Unit = Utils.tryCatch { - // Waiting to run, preventing task messages from being sent to submit services before SubmitResponse, such as entry - 
Thread.sleep(ComputationExecutorConf.TASK_SUBMIT_WAIT_TIME_MS) - submitTaskToExecutor(task, labels) match { - case ErrorExecuteResponse(message, throwable) => - sendToEntrance(task, ResponseTaskError(task.getTaskId, message)) - logger.error(message, throwable) - sendToEntrance(task, ResponseTaskStatus(task.getTaskId, ExecutionNodeStatus.Failed)) - case _ => + val taskId: Int = taskExecutedNum.incrementAndGet() + val retryAble: Boolean = { + val retry = + requestTask.getProperties.getOrDefault( + ComputationEngineConstant.RETRYABLE_TYPE_NAME, + null + ) + if (null != retry) retry.asInstanceOf[Boolean] + else false + } + + if (StringUtils.isNotBlank(jobId)) { + System.getProperties.put(ComputationExecutorConf.JOB_ID_TO_ENV_KEY, jobId) + logger.info(s"Received job with id ${jobId}.") + } + val task = new CommonEngineConnTask(String.valueOf(taskId), retryAble) + task.setCode(requestTask.getCode) + task.setProperties(requestTask.getProperties) + task.data(ComputationEngineConstant.LOCK_TYPE_NAME, requestTask.getLock) + task.setStatus(ExecutionNodeStatus.Scheduled) + val labels = requestTask.getLabels.asScala.toArray + task.setLabels(labels) + val entranceServerInstance = RPCUtils.getServiceInstanceFromSender(sender) + task.setCallbackServiceInstance(entranceServerInstance) + logger.info(s"task $taskId submit executor to execute") + val runnable = new Runnable { + override def run(): Unit = Utils.tryCatch { + // Waiting to run, preventing task messages from being sent to submit services before SubmitResponse, such as entry + Thread.sleep(ComputationExecutorConf.TASK_SUBMIT_WAIT_TIME_MS) + LoggerUtils.setJobIdMDC(jobId) + submitTaskToExecutor(task, labels) match { + case ErrorExecuteResponse(message, throwable) => + sendToEntrance(task, ResponseTaskError(task.getTaskId, message)) + logger.error(message, throwable) + sendToEntrance(task, ResponseTaskStatus(task.getTaskId, ExecutionNodeStatus.Failed)) + case _ => + } + LoggerUtils.removeJobIdMDC() + } { t => + 
logger.warn("Failed to submit task ", t) + LoggerUtils.removeJobIdMDC() + sendToEntrance( + task, + ResponseTaskError(task.getTaskId, ExceptionUtils.getRootCauseMessage(t)) + ) + sendToEntrance(task, ResponseTaskStatus(task.getTaskId, ExecutionNodeStatus.Failed)) } - } { t => - logger.warn("Failed to submit task ", t) - sendToEntrance( - task, - ResponseTaskError(task.getTaskId, ExceptionUtils.getRootCauseMessage(t)) - ) - sendToEntrance(task, ResponseTaskStatus(task.getTaskId, ExecutionNodeStatus.Failed)) } + val submitTaskToExecutorFuture = taskAsyncSubmitExecutor.submit(runnable) + SubmitResponse(task.getTaskId) + } { + LoggerUtils.removeJobIdMDC() } - val submitTaskToExecutorFuture = taskAsyncSubmitExecutor.submit(runnable) - SubmitResponse(task.getTaskId) - } private def submitTaskToExecutor( task: CommonEngineConnTask, @@ -256,11 +266,11 @@ class TaskExecutionServiceImpl case computationExecutor: ComputationExecutor => taskIdCache.put(task.getTaskId, computationExecutor) submitTask(task, computationExecutor) - case o => + case _ => val labelsStr = if (labels != null) labels.filter(_ != null).map(_.getStringValue).mkString(",") else "" val msg = - "Invalid computationExecutor : " + o.getClass.getName + ", labels : " + labelsStr + ", requestTask : " + task.getTaskId + "Invalid computationExecutor : " + executor.getClass.getName + ", labels : " + labelsStr + ", requestTask : " + task.getTaskId logger.error(msg) ErrorExecuteResponse( "Invalid computationExecutor(生成无效的计算引擎,请联系管理员).", @@ -388,18 +398,15 @@ class TaskExecutionServiceImpl new Thread(consumerRunnable) } - private def executeTask(task: EngineConnTask, executor: ComputationExecutor): Unit = { - val response = executor.execute(task) - response match { - case ErrorExecuteResponse(message, throwable) => - sendToEntrance(task, ResponseTaskError(task.getTaskId, message)) - logger.error(message, throwable) - LogHelper.pushAllRemainLogs() - executor.transformTaskStatus(task, ExecutionNodeStatus.Failed) - case 
_ => logger.warn(s"task get response is $response") + private def executeTask(task: EngineConnTask, executor: ComputationExecutor): Unit = + Utils.tryFinally { + val jobId = JobUtils.getJobIdFromMap(task.getProperties) + LoggerUtils.setJobIdMDC(jobId) + executor.execute(task) + clearCache(task.getTaskId) + } { + LoggerUtils.removeJobIdMDC() } - clearCache(task.getTaskId) - } /** * Open daemon thread @@ -425,56 +432,18 @@ class TaskExecutionServiceImpl .isRunning(task.getStatus) ) { val progressResponse = taskProgress(task.getTaskId) - val resourceResponse: ResponseTaskYarnResource = - taskYarnResource(task.getTaskId) match { - case responseTaskYarnResource: ResponseTaskYarnResource => - if ( - responseTaskYarnResource.resourceMap != null && !responseTaskYarnResource.resourceMap.isEmpty - ) { - responseTaskYarnResource - } else { - null - } - case _ => - null - } - val extraInfoMap = new util.HashMap[String, Object]() - extraInfoMap.put(TaskConstant.ENGINE_INSTANCE, Sender.getThisInstance) - extraInfoMap.put( - ECConstants.EC_TICKET_ID_KEY, - EngineConnObject.getEngineCreationContext.getTicketId - ) - val ecParams = EngineConnObject.getEngineCreationContext.getOptions - if (ecParams.containsKey(ECConstants.YARN_QUEUE_NAME_CONFIG_KEY)) { - extraInfoMap.put( - ECConstants.YARN_QUEUE_NAME_KEY, - ecParams.get(ECConstants.YARN_QUEUE_NAME_CONFIG_KEY) - ) - } - extraInfoMap.put(TaskConstant.ENGINE_CONN_TASK_ID, task.getTaskId) - extraInfoMap.put( - TaskConstant.ENGINE_CONN_SUBMIT_TIME, - System.currentTimeMillis.toString - ) + val resourceResponse = buildResourceMap(task) + val extraInfoMap = buildExtraInfoMap(task) // todo add other info - var respRunningInfo: ResponseTaskRunningInfo = null - if (null != resourceResponse) { - respRunningInfo = ResponseTaskRunningInfo( - progressResponse.execId, - progressResponse.progress, - progressResponse.progressInfo, - resourceResponse.resourceMap, - extraInfoMap - ) - } else { - respRunningInfo = ResponseTaskRunningInfo( - 
progressResponse.execId, - progressResponse.progress, - progressResponse.progressInfo, - null, - extraInfoMap - ) - } + val resourceMap = if (null != resourceResponse) resourceResponse.resourceMap else null + + val respRunningInfo: ResponseTaskRunningInfo = ResponseTaskRunningInfo( + progressResponse.execId, + progressResponse.progress, + progressResponse.progressInfo, + resourceMap, + extraInfoMap + ) sendToEntrance(task, respRunningInfo) Thread.sleep(TimeUnit.MILLISECONDS.convert(sleepInterval, TimeUnit.SECONDS)) } @@ -483,6 +452,42 @@ class TaskExecutionServiceImpl }) } + private def buildExtraInfoMap(task: EngineConnTask): util.HashMap[String, Object] = { + val extraInfoMap = new util.HashMap[String, Object]() + extraInfoMap.put(TaskConstant.ENGINE_INSTANCE, Sender.getThisInstance) + extraInfoMap.put( + ECConstants.EC_TICKET_ID_KEY, + EngineConnObject.getEngineCreationContext.getTicketId + ) + val ecParams = EngineConnObject.getEngineCreationContext.getOptions + if (ecParams.containsKey(ECConstants.YARN_QUEUE_NAME_CONFIG_KEY)) { + extraInfoMap.put( + ECConstants.YARN_QUEUE_NAME_KEY, + ecParams.get(ECConstants.YARN_QUEUE_NAME_CONFIG_KEY) + ) + } + extraInfoMap.put(TaskConstant.ENGINE_CONN_TASK_ID, task.getTaskId) + extraInfoMap.put(TaskConstant.ENGINE_CONN_SUBMIT_TIME, System.currentTimeMillis.toString) + extraInfoMap + } + + private def buildResourceMap(task: EngineConnTask): ResponseTaskYarnResource = { + val resourceResponse: ResponseTaskYarnResource = + taskYarnResource(task.getTaskId) match { + case responseTaskYarnResource: ResponseTaskYarnResource => + if ( + responseTaskYarnResource.resourceMap != null && !responseTaskYarnResource.resourceMap.isEmpty + ) { + responseTaskYarnResource + } else { + null + } + case _ => + null + } + resourceResponse + } + private def taskYarnResource(taskID: String): ResponseTaskYarnResource = { val executor = taskIdCache.getIfPresent(taskID) executor match { @@ -503,12 +508,10 @@ class TaskExecutionServiceImpl if 
(ExecutionNodeStatus.isCompleted(task.getStatus)) { response = ResponseTaskProgress(taskID, 1.0f, null) } else { + val progress = executor.progress(taskID) + logger.info("The latest progress {} of the task id {} is:", progress, taskID) response = Utils.tryQuietly( - ResponseTaskProgress( - taskID, - executor.progress(taskID), - executor.getProgressInfo(taskID) - ) + ResponseTaskProgress(taskID, progress, executor.getProgressInfo(taskID)) ) } } else { @@ -603,7 +606,7 @@ class TaskExecutionServiceImpl logger.warn("Unknown event : " + BDPJettyServerHelper.gson.toJson(event)) } - override def onLogUpdate(logUpdateEvent: TaskLogUpdateEvent): Unit = { + override def onLogUpdate(logUpdateEvent: TaskLogUpdateEvent): Unit = Utils.tryAndWarn { if (EngineConnConf.ENGINE_PUSH_LOG_TO_ENTRANCE.getValue) { if (null != logUpdateEvent && StringUtils.isNotBlank(logUpdateEvent.taskId)) { val task = getTaskByTaskId(logUpdateEvent.taskId) @@ -660,26 +663,33 @@ class TaskExecutionServiceImpl } } - override def onProgressUpdate(taskProgressUpdateEvent: TaskProgressUpdateEvent): Unit = { - if (EngineConnConf.ENGINE_PUSH_LOG_TO_ENTRANCE.getValue) { - val task = getTaskByTaskId(taskProgressUpdateEvent.taskId) - if (null != task) { - sendToEntrance( - task, - ResponseTaskProgress( + override def onProgressUpdate(taskProgressUpdateEvent: TaskProgressUpdateEvent): Unit = + Utils.tryAndWarn { + if (EngineConnConf.ENGINE_PUSH_LOG_TO_ENTRANCE.getValue) { + val task = getTaskByTaskId(taskProgressUpdateEvent.taskId) + if (null != task) { + val resourceResponse = buildResourceMap(task) + val extraInfoMap = buildExtraInfoMap(task) + + val resourceMap = if (null != resourceResponse) resourceResponse.resourceMap else null + + val respRunningInfo: ResponseTaskRunningInfo = ResponseTaskRunningInfo( taskProgressUpdateEvent.taskId, taskProgressUpdateEvent.progress, - taskProgressUpdateEvent.progressInfo + taskProgressUpdateEvent.progressInfo, + resourceMap, + extraInfoMap ) - ) - } else { - logger.error( 
- "Task cannot null! taskProgressUpdateEvent : " + ComputationEngineUtils.GSON - .toJson(taskProgressUpdateEvent) - ) + + sendToEntrance(task, respRunningInfo) + } else { + logger.error( + "Task cannot null! taskProgressUpdateEvent : " + ComputationEngineUtils.GSON + .toJson(taskProgressUpdateEvent) + ) + } } } - } override def onResultSetCreated(taskResultCreateEvent: TaskResultCreateEvent): Unit = { logger.info(s"start to deal result event ${taskResultCreateEvent.taskId}") diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/ECTaskKillHandler.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/ECTaskKillHandler.scala index d06e8ac077..15e70315e3 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/ECTaskKillHandler.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/org/apache/linkis/engineconn/computation/executor/upstream/handler/ECTaskKillHandler.scala @@ -39,10 +39,10 @@ class ECTaskKillHandler extends MonitorHandler with Logging { while (elements.hasNext) { val element = elements.next Utils.tryCatch { - doKill(element) logger.error( s"ERROR: entrance : ${element.getUpstreamConnection().getUpstreamServiceInstanceName()} lose connect, will kill job : ${element.getKey()}" ) + doKill(element) } { t => logger.error("Failed to kill job: " + element.getKey, t) } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessHelper.scala 
b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessHelper.scala deleted file mode 100644 index 42e79c52cc..0000000000 --- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/test/scala/org/apache/linkis/engineconn/computation/executor/upstream/access/ECTaskEntranceInfoAccessHelper.scala +++ /dev/null @@ -1,96 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.engineconn.computation.executor.upstream.access - -import org.apache.linkis.DataWorkCloudApplication -import org.apache.linkis.common.ServiceInstance -import org.apache.linkis.common.conf.{CommonVars, DWCArgumentsParser} -import org.apache.linkis.common.utils.Utils -import org.apache.linkis.engineconn.common.creation.DefaultEngineCreationContext -import org.apache.linkis.engineconn.core.util.EngineConnUtils -import org.apache.linkis.governance.common.conf.GovernanceCommonConf -import org.apache.linkis.governance.common.utils.EngineConnArgumentsParser -import org.apache.linkis.manager.engineplugin.common.launch.process.Environment -import org.apache.linkis.manager.label.builder.factory.{ - LabelBuilderFactory, - LabelBuilderFactoryContext -} -import org.apache.linkis.manager.label.entity.Label -import org.apache.linkis.server.conf.ServerConfiguration - -import org.apache.commons.lang3.StringUtils - -import java.util - -import org.slf4j.{Logger, LoggerFactory} - -object ECTaskEntranceInfoAccessHelper { - val logger: Logger = LoggerFactory.getLogger(ECTaskEntranceInfoAccessHelper.getClass) - - val engineCreationContext = new DefaultEngineCreationContext - val labelBuilderFactory: LabelBuilderFactory = LabelBuilderFactoryContext.getLabelBuilderFactory - - def initApp(args: Array[String]): Unit = { - val arguments = EngineConnArgumentsParser.getEngineConnArgumentsParser.parseToObj(args) - val engineConf = arguments.getEngineConnConfMap - engineCreationContext.setUser(engineConf.getOrElse("user", Utils.getJvmUser)) - engineCreationContext.setTicketId(engineConf.getOrElse("ticketId", "")) - val host = CommonVars(Environment.ECM_HOST.toString, "127.0.0.1").getValue - val port = CommonVars(Environment.ECM_PORT.toString, "80").getValue - engineCreationContext.setEMInstance( - ServiceInstance(GovernanceCommonConf.ENGINE_CONN_MANAGER_SPRING_NAME.getValue, s"$host:$port") - ) - val labels = new util.ArrayList[Label[_]] - val labelArgs = 
engineConf.filter(_._1.startsWith(EngineConnArgumentsParser.LABEL_PREFIX)) - if (labelArgs.nonEmpty) { - labelArgs.foreach { case (key, value) => - labels.add( - labelBuilderFactory - .createLabel[Label[_]](key.replace(EngineConnArgumentsParser.LABEL_PREFIX, ""), value) - ) - } - engineCreationContext.setLabels(labels) - } - val jMap = new java.util.HashMap[String, String](engineConf.size) - engineConf.foreach(kv => jMap.put(kv._1, kv._2)) - engineCreationContext.setOptions(jMap) - engineCreationContext.setArgs(args) - // EngineConnObject.setEngineCreationContext(engineCreationContext) - logger.info( - "Finished to init engineCreationContext: " + EngineConnUtils.GSON - .toJson(engineCreationContext) - ) - - logger.info("Spring is enabled, now try to start SpringBoot.") - logger.info("<--------------------Start SpringBoot App-------------------->") - val parser = DWCArgumentsParser.parse(engineCreationContext.getArgs) - DWCArgumentsParser.setDWCOptionMap(parser.getDWCConfMap) - val existsExcludePackages = ServerConfiguration.BDP_SERVER_EXCLUDE_PACKAGES.getValue - if (!StringUtils.isEmpty(existsExcludePackages)) { - DataWorkCloudApplication.setProperty( - ServerConfiguration.BDP_SERVER_EXCLUDE_PACKAGES.key, - existsExcludePackages - ) - } - // 加载spring类 - DataWorkCloudApplication.main(DWCArgumentsParser.formatSpringOptions(parser.getSpringConfMap)) - - logger.info("<--------------------SpringBoot App init succeed-------------------->") - } - -} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConf.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConf.scala index 1518f00a6c..b134fa5638 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConf.scala +++ 
b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/org/apache/linkis/engineconn/common/conf/EngineConnConf.scala @@ -37,7 +37,7 @@ object EngineConnConf { val ENGINE_CONN_ONCE_HOOKS = CommonVars( "linkis.engine.connector.once.hooks", - "org.apache.linkis.engineconn.computation.executor.hook.ComputationEngineConnHook" + "org.apache.linkis.engineconn.once.executor.hook.OnceEngineConnHook" ) val ENGINE_LAUNCH_CMD_PARAMS_USER_KEY = @@ -61,6 +61,8 @@ object EngineConnConf { val ENGINE_CONN_LOCAL_LOG_DIRS_KEY = CommonVars("wds.linkis.engine.logs.dir.key", "LOG_DIRS") + val ENGINE_CONN_LOCAL_TMP_DIR = CommonVars("wds.linkis.engine.tmp.dir", "TEMP_DIRS") + val ENGINE_CONN_CREATION_WAIT_TIME = CommonVars("wds.linkis.engine.connector.init.time", new TimeType("8m")) @@ -81,6 +83,8 @@ object EngineConnConf { def getWorkHome: String = System.getenv(ENGINE_CONN_LOCAL_PATH_PWD_KEY.getValue) + def getEngineTmpDir: String = System.getenv(ENGINE_CONN_LOCAL_TMP_DIR.getValue) + def getLogDir: String = { val logDir = System.getenv(ENGINE_CONN_LOCAL_LOG_DIRS_KEY.getValue) if (StringUtils.isNotEmpty(logDir)) logDir else new File(getWorkHome, "logs").getPath diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/executor/ExecutorManager.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/executor/ExecutorManager.scala index f758b129e3..61242beaae 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/executor/ExecutorManager.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/core/executor/ExecutorManager.scala @@ -137,7 +137,7 @@ class LabelExecutorManagerImpl extends LabelExecutorManager with Logging { } protected def getLabelKey(labels: 
Array[Label[_]]): String = - labels.map(_.getStringValue).mkString("&") + labels.filter(null != _).map(_.getStringValue).mkString("&") protected def createExecutor( engineCreationContext: EngineCreationContext, @@ -171,7 +171,10 @@ class LabelExecutorManagerImpl extends LabelExecutorManager with Logging { MessageFormat.format(CANNOT_GET_LABEL_KEY.getErrorDesc, GSON.toJson(labels)) ) } - + if (!executors.isEmpty && factories.size <= 1) { + logger.info("For a single Executor EC, if an Executor exists, it will be returned directly") + return getReportExecutor.asInstanceOf[LabelExecutor] + } if (!executors.containsKey(labelKey)) executors synchronized { if (!executors.containsKey(labelKey)) { val executor = tryCreateExecutor(engineCreationContext, labels) @@ -184,6 +187,12 @@ class LabelExecutorManagerImpl extends LabelExecutorManager with Logging { override def generateExecutorId(): Int = idCreator.getAndIncrement() override def getExecutorByLabels(labels: Array[Label[_]]): LabelExecutor = { + + if (!executors.isEmpty && factories.size <= 1) { + logger.info("For a single Executor EC, if an Executor exists, it will be returned directly") + return getReportExecutor.asInstanceOf[LabelExecutor] + } + val labelKey = getLabelKey(labels) if (null == labelKey) return null if (!executors.containsKey(labelKey)) executors synchronized { diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/launch/EngineConnServer.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/launch/EngineConnServer.scala index 180798a772..14a0701d9d 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/launch/EngineConnServer.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/org/apache/linkis/engineconn/launch/EngineConnServer.scala @@ -127,6 
+127,7 @@ object EngineConnServer extends Logging { this.engineCreationContext.setEMInstance( ServiceInstance(GovernanceCommonConf.ENGINE_CONN_MANAGER_SPRING_NAME.getValue, s"$host:$port") ) + val labels = new ArrayBuffer[Label[_]] val labelArgs = engineConf.filter(_._1.startsWith(EngineConnArgumentsParser.LABEL_PREFIX)) if (labelArgs.nonEmpty) { diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorConfiguration.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorConfiguration.scala index 0eb211f731..2d3a614434 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorConfiguration.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorConfiguration.scala @@ -37,13 +37,13 @@ object AccessibleExecutorConfiguration { val ENGINECONN_LOG_SEND_SIZE = CommonVars[Int]("wds.linkis.engineconn.log.send.cache.size", 300) val ENGINECONN_MAX_FREE_TIME = - CommonVars("wds.linkis.engineconn.max.free.time", new TimeType("30m")) + CommonVars("wds.linkis.engineconn.max.free.time", new TimeType("5m")) val ENGINECONN_LOCK_CHECK_INTERVAL = CommonVars("wds.linkis.engineconn.lock.free.interval", new TimeType("3m")) - val ENGINECONN_SUPPORT_PARALLELISM: Boolean = - CommonVars("wds.linkis.engineconn.support.parallelism", false).getValue + val ENGINECONN_SUPPORT_PARALLELISM = + CommonVars("wds.linkis.engineconn.support.parallelism", false) val ENGINECONN_HEARTBEAT_TIME = CommonVars("wds.linkis.engineconn.heartbeat.time", new TimeType("2m")) @@ -55,4 
+55,16 @@ object AccessibleExecutorConfiguration { val ENABLE_MAINTAIN_CREATORS = CommonVars("wds.linkis.engineconn.maintain.cretors", "IDE") + val REPORTING_DELAY_MS = CommonVars( + "linkis.engineconn.heartbeat.report.delay", + 20, + "Heartbeat status reporting delay, default 20ms, Negative numbers do not take effect" + ).getValue + + val REPORTING_IGNORE_MS = CommonVars( + "linkis.engineconn.heartbeat.report.ignore", + 3, + "Heartbeat status report repeated ignore, default 3ms,Negative numbers do not take effect" + ).getValue + } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorSpringConfiguration.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorSpringConfiguration.scala index 53cdd44b05..93cb41f344 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorSpringConfiguration.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/conf/AccessibleExecutorSpringConfiguration.scala @@ -43,9 +43,13 @@ class AccessibleExecutorSpringConfiguration extends Logging { def createLockManager(): LockService = { val lockService = - if (AccessibleExecutorConfiguration.ENGINECONN_SUPPORT_PARALLELISM) { + if (AccessibleExecutorConfiguration.ENGINECONN_SUPPORT_PARALLELISM.getHotValue()) { + logger.info("Engine supports parallelism.") new EngineConnConcurrentLockService - } else new EngineConnTimedLockService + } else { + logger.info("Engine doesn't support parallelism.") + new EngineConnTimedLockService + } asyncListenerBusContext.addListener(lockService) 
lockService } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/hook/OperationHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/hook/OperationHook.scala new file mode 100644 index 0000000000..12e42c66a5 --- /dev/null +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/hook/OperationHook.scala @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconn.acessible.executor.hook + +import org.apache.linkis.manager.common.protocol.engine.{ + EngineOperateRequest, + EngineOperateResponse +} + +import scala.collection.mutable.ArrayBuffer + +trait OperationHook { + def getName(): String + + def doPreOperation( + engineOperateRequest: EngineOperateRequest, + engineOperateResponse: EngineOperateResponse + ): Unit + + def doPostOperation( + engineOperateRequest: EngineOperateRequest, + engineOperateResponse: EngineOperateResponse + ): Unit + +} + +object OperationHook { + private var operationHooks: ArrayBuffer[OperationHook] = new ArrayBuffer[OperationHook]() + + def registerOperationHook(operationHook: OperationHook): Unit = { + operationHooks.append(operationHook) + } + + def getOperationHooks(): Array[OperationHook] = operationHooks.toArray +} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/lock/EngineConnTimedLock.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/lock/EngineConnTimedLock.scala index 26a9203795..af4d1eb017 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/lock/EngineConnTimedLock.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/lock/EngineConnTimedLock.scala @@ -28,6 +28,7 @@ import org.apache.linkis.engineconn.acessible.executor.listener.event.{ ExecutorUnLockEvent } import org.apache.linkis.engineconn.core.executor.ExecutorManager +import org.apache.linkis.engineconn.executor.entity.SensibleExecutor import org.apache.linkis.engineconn.executor.listener.ExecutorListenerBusContext 
import org.apache.linkis.manager.common.entity.enumeration.NodeStatus @@ -42,12 +43,10 @@ class EngineConnTimedLock(private var timeout: Long) val releaseScheduler = new ScheduledThreadPoolExecutor(1) var releaseTask: ScheduledFuture[_] = null var lastLockTime: Long = 0 - var lockedBy: AccessibleExecutor = null override def acquire(executor: AccessibleExecutor): Unit = { lock.acquire() lastLockTime = System.currentTimeMillis() - lockedBy = executor scheduleTimeout } @@ -57,8 +56,6 @@ class EngineConnTimedLock(private var timeout: Long) logger.debug("try to lock for succeed is " + succeed.toString) if (succeed) { lastLockTime = System.currentTimeMillis() - lockedBy = executor - logger.debug("try to lock for add time out task ! Locked by thread : " + lockedBy.getId) scheduleTimeout } succeed @@ -67,18 +64,13 @@ class EngineConnTimedLock(private var timeout: Long) // Unlock callback is not called in release method, because release method is called actively override def release(): Unit = { logger.debug( - "try to release for lock," + lockedBy + ",current thread " + Thread.currentThread().getName + s"try to release for lock: ${lock.toString}, current thread " + Thread.currentThread().getName ) - if (lockedBy != null) { - // && lockedBy == Thread.currentThread() Inconsistent thread(线程不一致) - logger.debug("try to release for lockedBy and thread ") - if (releaseTask != null) { - releaseTask.cancel(true) - releaseTask = null - } - logger.debug("try to release for lock release success") - lockedBy = null + if (releaseTask != null) { + releaseTask.cancel(true) + releaseTask = null } + logger.debug("try to release for lock release success") unlockCallback(lock.toString) resetLock() } @@ -96,7 +88,6 @@ class EngineConnTimedLock(private var timeout: Long) releaseScheduler.purge() } lock.release() - lockedBy = null } resetLock() } @@ -108,13 +99,18 @@ class EngineConnTimedLock(private var timeout: Long) new Runnable { override def run(): Unit = { synchronized { - if (isAcquired() 
&& NodeStatus.Idle == lockedBy.getStatus && isExpired()) { - // unlockCallback depends on lockedBy, so lockedBy cannot be set null before unlockCallback - logger.info(s"Lock : [${lock.toString} was released due to timeout.") - release() - } else if (isAcquired() && NodeStatus.Busy == lockedBy.getStatus) { - lastLockTime = System.currentTimeMillis() - logger.info("Update lastLockTime because executor is busy.") + ExecutorManager.getInstance.getReportExecutor match { + case reportExecutor: AccessibleExecutor => + if ( + isAcquired() && NodeStatus.Idle == reportExecutor.getStatus && isExpired() + ) { + // unlockCallback depends on lockedBy, so lockedBy cannot be set null before unlockCallback + logger.info(s"Lock : [${lock.toString} was released due to timeout.") + release() + } else if (isAcquired() && NodeStatus.Busy == reportExecutor.getStatus) { + lastLockTime = System.currentTimeMillis() + logger.info("Update lastLockTime because executor is busy.") + } } } } @@ -143,14 +139,12 @@ class EngineConnTimedLock(private var timeout: Long) } override def renew(): Boolean = { - if (lockedBy != null) { - if (isAcquired && releaseTask != null) { - if (releaseTask.cancel(false)) { - releaseScheduler.purge() - scheduleTimeout - lastLockTime = System.currentTimeMillis() - return true - } + if (isAcquired && releaseTask != null) { + if (releaseTask.cancel(false)) { + releaseScheduler.purge() + scheduleTimeout + lastLockTime = System.currentTimeMillis() + return true } } false @@ -169,9 +163,18 @@ class EngineConnTimedLock(private var timeout: Long) } private def unlockCallback(lockStr: String): Unit = { - /* if (null != lockedBy) { - lockedBy.transition(NodeStatus.Unlock) - } */ + val nodeStatus = ExecutorManager.getInstance.getReportExecutor match { + case sensibleExecutor: SensibleExecutor => + sensibleExecutor.getStatus + case _ => NodeStatus.Idle + } + if (NodeStatus.isCompleted(nodeStatus)) { + logger.info( + "The node({}) is already in the completed state, and the 
unlocking is invalid", + nodeStatus.toString + ) + return + } val executors = ExecutorManager.getInstance.getExecutors.filter(executor => null != executor && !executor.isClosed ) @@ -185,7 +188,7 @@ class EngineConnTimedLock(private var timeout: Long) ExecutorListenerBusContext .getExecutorListenerBusContext() .getEngineConnAsyncListenerBus - .post(ExecutorUnLockEvent(null, lockStr.toString)) + .post(ExecutorUnLockEvent(null, lockStr)) } override def onExecutorCreated(executorCreateEvent: ExecutorCreateEvent): Unit = {} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/operator/impl/EngineConnApplicationInfoOperator.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/operator/impl/EngineConnApplicationInfoOperator.scala index ad762892ef..c7635615e0 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/operator/impl/EngineConnApplicationInfoOperator.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/operator/impl/EngineConnApplicationInfoOperator.scala @@ -17,9 +17,11 @@ package org.apache.linkis.engineconn.acessible.executor.operator.impl +import org.apache.linkis.engineconn.acessible.executor.service.OperateService import org.apache.linkis.engineconn.common.exception.EngineConnException import org.apache.linkis.engineconn.core.executor.ExecutorManager import org.apache.linkis.engineconn.executor.entity.YarnExecutor +import org.apache.linkis.governance.common.constant.ec.ECConstants._ import org.apache.linkis.manager.common.operator.Operator class EngineConnApplicationInfoOperator extends Operator { @@ 
-30,10 +32,10 @@ class EngineConnApplicationInfoOperator extends Operator { ExecutorManager.getInstance.getReportExecutor match { case yarnExecutor: YarnExecutor => Map( - "applicationId" -> yarnExecutor.getApplicationId, - "applicationUrl" -> yarnExecutor.getApplicationURL, - "queue" -> yarnExecutor.getQueue, - "yarnMode" -> yarnExecutor.getYarnMode + YARN_APPID_NAME_KEY -> yarnExecutor.getApplicationId, + YARN_APP_URL_KEY -> yarnExecutor.getApplicationURL, + QUEUE -> yarnExecutor.getQueue, + YARN_MODE_KEY -> yarnExecutor.getYarnMode ) case _ => throw EngineConnException( diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultAccessibleService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultAccessibleService.scala index d07d16ce27..8ef944fc9c 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultAccessibleService.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultAccessibleService.scala @@ -19,6 +19,7 @@ package org.apache.linkis.engineconn.acessible.executor.service import org.apache.linkis.DataWorkCloudApplication import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.acessible.executor.conf.AccessibleExecutorConfiguration import org.apache.linkis.engineconn.acessible.executor.entity.AccessibleExecutor import org.apache.linkis.engineconn.acessible.executor.listener.event.{ ExecutorCompletedEvent, @@ -57,6 +58,12 @@ class DefaultAccessibleService extends AccessibleService with Logging { private var 
shutDownHooked: Boolean = false + private var lastStatusChanged: Long = System.currentTimeMillis() + + private var lastStatus: NodeStatus = null + + private var lastThreadName: String = null + @Receiver override def dealEngineStopRequest( engineSuicideRequest: EngineSuicideRequest, @@ -67,7 +74,7 @@ class DefaultAccessibleService extends AccessibleService with Logging { DataWorkCloudApplication.getServiceInstance.equals(engineSuicideRequest.getServiceInstance) ) { stopEngine() - logger.info(s"engine will suiside now.") + logger.info(s"engine was asked to suiside by ${engineSuicideRequest.getUser} now.") ShutdownHook.getShutdownHook.notifyStop() } else { if (null != engineSuicideRequest.getServiceInstance) { @@ -167,7 +174,39 @@ class DefaultAccessibleService extends AccessibleService with Logging { override def onExecutorStatusChanged( executorStatusChangedEvent: ExecutorStatusChangedEvent ): Unit = { - reportHeartBeatMsg(executorStatusChangedEvent.executor) + val sinceLastTime = System.currentTimeMillis() - lastStatusChanged + val reportDelay = AccessibleExecutorConfiguration.REPORTING_DELAY_MS + if ( + reportDelay > 0 && executorStatusChangedEvent.toStatus != lastStatus && reportDelay > sinceLastTime + ) { + logger.info( + "In order to ensure that the previous state is consumed first, sleep here {} ms", + reportDelay * 2 + ) + + Thread.sleep(reportDelay * 2) + } + val ignoreTime = AccessibleExecutorConfiguration.REPORTING_IGNORE_MS + val currentThreadName = Thread.currentThread().getName + if ( + ignoreTime > 0 && executorStatusChangedEvent.toStatus == lastStatus && ignoreTime > sinceLastTime && currentThreadName + .equals(lastThreadName) + ) { + logger.info( + "If the status is the same and the time is short and the thread is the same, no status report is performed {}", + executorStatusChangedEvent + ) + } else if ( + NodeStatus.Busy == lastStatus && executorStatusChangedEvent.toStatus == NodeStatus.Idle + ) { + logger.info("The state transition from Busy to 
Idle is not reported") + } else { + reportHeartBeatMsg(executorStatusChangedEvent.executor) + } + logger.info("Finished to report status {}", executorStatusChangedEvent) + lastStatusChanged = System.currentTimeMillis() + lastStatus = executorStatusChangedEvent.toStatus + lastThreadName = currentThreadName } private def reportHeartBeatMsg(executor: Executor): Unit = { diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultExecutorHeartbeatService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultExecutorHeartbeatService.scala index 067e0d2cbb..ea3248ba6d 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultExecutorHeartbeatService.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultExecutorHeartbeatService.scala @@ -78,6 +78,7 @@ class DefaultExecutorHeartbeatService heartbeatTime, TimeUnit.MILLISECONDS ) + ExecutorHeartbeatServiceHolder.registerHeartBeatService(this) } /** diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultOperateService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultOperateService.scala index df34626c20..c0ef50636d 100644 --- 
a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultOperateService.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/DefaultOperateService.scala @@ -18,6 +18,7 @@ package org.apache.linkis.engineconn.acessible.executor.service import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.acessible.executor.hook.OperationHook import org.apache.linkis.manager.common.operator.OperatorFactory import org.apache.linkis.manager.common.protocol.engine.{ EngineOperateRequest, @@ -38,19 +39,50 @@ class DefaultOperateService extends OperateService with Logging { override def executeOperation( engineOperateRequest: EngineOperateRequest ): EngineOperateResponse = { + var response: EngineOperateResponse = null + val parameters = engineOperateRequest.parameters.asScala.toMap val operator = Utils.tryCatch(OperatorFactory().getOperatorRequest(parameters)) { t => logger.error(s"Get operator failed, parameters is ${engineOperateRequest.parameters}.", t) - return EngineOperateResponse(Map.empty, true, ExceptionUtils.getRootCauseMessage(t)) + response = EngineOperateResponse(Map.empty, true, ExceptionUtils.getRootCauseMessage(t)) + doPostHook(engineOperateRequest, response) + return response } logger.info( s"Try to execute operator ${operator.getClass.getSimpleName} with parameters ${engineOperateRequest.parameters}." 
) val result = Utils.tryCatch(operator(parameters)) { t => logger.error(s"Execute ${operator.getClass.getSimpleName} failed.", t) - return EngineOperateResponse(Map.empty, true, ExceptionUtils.getRootCauseMessage(t)) + response = EngineOperateResponse(Map.empty, true, ExceptionUtils.getRootCauseMessage(t)) + doPostHook(engineOperateRequest, response) + return response + } + logger.info(s"End to execute operator ${operator.getClass.getSimpleName}.") + response = EngineOperateResponse(result) + doPostHook(engineOperateRequest, response) + response + } + + private def doPreHook( + engineOperateRequest: EngineOperateRequest, + engineOperateResponse: EngineOperateResponse + ): Unit = { + Utils.tryAndWarn { + OperationHook + .getOperationHooks() + .foreach(hook => hook.doPreOperation(engineOperateRequest, engineOperateResponse)) + } + } + + private def doPostHook( + engineOperateRequest: EngineOperateRequest, + engineOperateResponse: EngineOperateResponse + ): Unit = { + Utils.tryAndWarn { + OperationHook + .getOperationHooks() + .foreach(hook => hook.doPostOperation(engineOperateRequest, engineOperateResponse)) } - EngineOperateResponse(result) } } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/EngineConnTimedLockService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/EngineConnTimedLockService.scala index 452c6305b0..026234e938 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/EngineConnTimedLockService.scala +++ 
b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/EngineConnTimedLockService.scala @@ -51,7 +51,7 @@ class EngineConnTimedLockService extends LockService with Logging { private var lockType: EngineLockType = EngineLockType.Timed private def isSupportParallelism: Boolean = - AccessibleExecutorConfiguration.ENGINECONN_SUPPORT_PARALLELISM + AccessibleExecutorConfiguration.ENGINECONN_SUPPORT_PARALLELISM.getHotValue() /** * @param lock @@ -161,9 +161,7 @@ class EngineConnTimedLockService extends LockService with Logging { .toString ) if (isLockExist(lock)) { - logger.info( - s"try to unlock lockEntity : lockString=$lockString,lockedBy=${engineConnLock.lockedBy.getId}" - ) + logger.info(s"try to unlock lockEntity : lockString=$lockString") engineConnLock.release() this.lockString = null true diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/ExecutorHeartbeatService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/ExecutorHeartbeatService.scala index bfecf73252..7abcbe8dcf 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/ExecutorHeartbeatService.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/acessible/executor/service/ExecutorHeartbeatService.scala @@ -34,3 +34,14 @@ trait ExecutorHeartbeatService { def dealNodeHeartbeatRequest(nodeHeartbeatRequest: NodeHeartbeatRequest): NodeHeartbeatMsg } + +object ExecutorHeartbeatServiceHolder { + + private var executorHeartbeatService: 
ExecutorHeartbeatService = _ + + def registerHeartBeatService(executorHeartbeatService: ExecutorHeartbeatService): Unit = + this.executorHeartbeatService = executorHeartbeatService + + def getDefaultHeartbeatService(): ExecutorHeartbeatService = executorHeartbeatService + +} diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/hook/CallbackEngineConnHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/hook/CallbackEngineConnHook.scala index d7ad2c7979..a5bf02f84f 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/hook/CallbackEngineConnHook.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/hook/CallbackEngineConnHook.scala @@ -20,6 +20,7 @@ package org.apache.linkis.engineconn.callback.hook import org.apache.linkis.DataWorkCloudApplication import org.apache.linkis.common.conf.DWCArgumentsParser import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.acessible.executor.entity.AccessibleExecutor import org.apache.linkis.engineconn.callback.service.{ EngineConnAfterStartCallback, EngineConnPidCallback @@ -28,6 +29,7 @@ import org.apache.linkis.engineconn.common.conf.EngineConnConf import org.apache.linkis.engineconn.common.creation.EngineCreationContext import org.apache.linkis.engineconn.common.engineconn.EngineConn import org.apache.linkis.engineconn.common.hook.EngineConnHook +import org.apache.linkis.engineconn.core.executor.ExecutorManager import org.apache.linkis.engineconn.core.hook.ShutdownHook import org.apache.linkis.manager.common.entity.enumeration.NodeStatus import 
org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallback @@ -59,7 +61,7 @@ class CallbackEngineConnHook extends EngineConnHook with Logging { newMap.put("spring.mvc.servlet.path", ServerConfiguration.BDP_SERVER_RESTFUL_URI.getValue) DataWorkCloudApplication.main(DWCArgumentsParser.formatSpringOptions(newMap.toMap)) - val engineConnPidCallBack = new EngineConnPidCallback(engineCreationContext.getEMInstance) + val engineConnPidCallBack = new EngineConnPidCallback() Utils.tryAndError(engineConnPidCallBack.callback()) logger.info("<--------------------SpringBoot App init succeed-------------------->") } @@ -78,9 +80,7 @@ class CallbackEngineConnHook extends EngineConnHook with Logging { engineCreationContext: EngineCreationContext, throwable: Throwable ): Unit = { - val engineConnAfterStartCallback = new EngineConnAfterStartCallback( - engineCreationContext.getEMInstance - ) + val engineConnAfterStartCallback = new EngineConnAfterStartCallback val prefixMsg = Sender.getThisServiceInstance + s": log dir: ${EngineConnConf.getLogDir}," Utils.tryAndError( engineConnAfterStartCallback.callback( @@ -88,7 +88,7 @@ class CallbackEngineConnHook extends EngineConnHook with Logging { Sender.getThisServiceInstance, engineCreationContext.getTicketId, NodeStatus.Failed, - prefixMsg + ExceptionUtils.getRootCauseMessage(throwable) + prefixMsg + ExceptionUtils.getStackTrace(throwable) ) ) ) @@ -99,15 +99,29 @@ class CallbackEngineConnHook extends EngineConnHook with Logging { protected def getNodeStatusOfStartSuccess( engineCreationContext: EngineCreationContext, engineConn: EngineConn - ): NodeStatus = NodeStatus.Success + ): NodeStatus = { + ExecutorManager.getInstance.getReportExecutor match { + case executor: AccessibleExecutor => + if ( + executor.getStatus == NodeStatus.ShuttingDown || executor.getStatus == NodeStatus.Failed + ) { + logger.info( + "The status of EngineConn is {}, and the actual status will be reported", + executor.getStatus + ) + 
executor.getStatus + } else { + NodeStatus.Unlock + } + case _ => NodeStatus.Unlock + } + } override def afterEngineServerStartSuccess( engineCreationContext: EngineCreationContext, engineConn: EngineConn ): Unit = { - val engineConnAfterStartCallback = new EngineConnAfterStartCallback( - engineCreationContext.getEMInstance - ) + val engineConnAfterStartCallback = new EngineConnAfterStartCallback Utils.tryAndError( engineConnAfterStartCallback.callback( EngineConnStatusCallback( diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnAfterStartCallback.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnAfterStartCallback.scala index fe6275ce67..d61e711f5d 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnAfterStartCallback.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnAfterStartCallback.scala @@ -17,10 +17,7 @@ package org.apache.linkis.engineconn.callback.service -import org.apache.linkis.common.ServiceInstance - -class EngineConnAfterStartCallback(emInstance: ServiceInstance) - extends AbstractEngineConnStartUpCallback(emInstance) { +class EngineConnAfterStartCallback extends AbstractEngineConnStartUpCallback { override def callback(): Unit = {} } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnCallback.scala 
b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnCallback.scala index 8a028d0a90..d1eb83d391 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnCallback.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnCallback.scala @@ -17,8 +17,8 @@ package org.apache.linkis.engineconn.callback.service -import org.apache.linkis.common.ServiceInstance import org.apache.linkis.common.utils.Logging +import org.apache.linkis.governance.common.conf.GovernanceCommonConf import org.apache.linkis.manager.common.entity.enumeration.NodeStatus import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallback import org.apache.linkis.protocol.message.RequestProtocol @@ -26,31 +26,25 @@ import org.apache.linkis.rpc.Sender trait EngineConnCallback { - protected def getEMSender: Sender - def callback(): Unit } -abstract class AbstractEngineConnStartUpCallback(emInstance: ServiceInstance) - extends EngineConnCallback - with Logging { - - override protected def getEMSender: Sender = { - Sender.getSender(emInstance) - } +abstract class AbstractEngineConnStartUpCallback() extends EngineConnCallback with Logging { def callback(protocol: RequestProtocol): Unit = { protocol match { case protocol: EngineConnStatusCallback => if (protocol.status.equals(NodeStatus.Failed)) { - logger.error(s"protocol will send to em: ${protocol}") + logger.error(s"EngineConn Start Failed protocol will send to LM: ${protocol}") } else { - logger.info(s"protocol will send to em: ${protocol}") + logger.info(s"protocol will send to lm: ${protocol}") } case _ => } - getEMSender.send(protocol) + Sender + 
.getSender(GovernanceCommonConf.ENGINE_APPLICATION_MANAGER_SPRING_NAME.getValue) + .send(protocol) } } diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnPidCallback.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnPidCallback.scala index 23a3f90a2b..f0995c0b99 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnPidCallback.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/org/apache/linkis/engineconn/callback/service/EngineConnPidCallback.scala @@ -17,15 +17,13 @@ package org.apache.linkis.engineconn.callback.service -import org.apache.linkis.common.ServiceInstance import org.apache.linkis.engineconn.core.EngineConnObject import org.apache.linkis.governance.common.protocol.task.ResponseEngineConnPid import org.apache.linkis.rpc.Sender import java.lang.management.ManagementFactory -class EngineConnPidCallback(emInstance: ServiceInstance) - extends AbstractEngineConnStartUpCallback(emInstance) { +class EngineConnPidCallback extends AbstractEngineConnStartUpCallback { override def callback(): Unit = { val pid = ManagementFactory.getRuntimeMXBean.getName.split("@")(0) diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/java/org/apache/linkis/manager/engineplugin/errorcode/EngineconnCoreErrorCodeSummary.java b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/java/org/apache/linkis/manager/engineplugin/errorcode/EngineconnCoreErrorCodeSummary.java index 1685f4b652..24c8b904cd 100644 --- 
a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/java/org/apache/linkis/manager/engineplugin/errorcode/EngineconnCoreErrorCodeSummary.java +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/java/org/apache/linkis/manager/engineplugin/errorcode/EngineconnCoreErrorCodeSummary.java @@ -23,7 +23,7 @@ public enum EngineconnCoreErrorCodeSummary implements LinkisErrorCode { FAILED_CREATE_ELR(10001, "Failed to createEngineConnLaunchRequest(创建 EngineConnLaunchRequest失败)"), EN_PLUGIN_MATERIAL_SOURCE_EXCEPTION( 10001, - "The engine plug-in material is abnormal, please check whether the material is uploaded successfully(引擎插件物料异常,请检查物料是否上传成功)"), + "The engine plugin material is abnormal, please check whether the material is uploaded successfully(引擎插件物料异常,请检查物料是否上传成功)"), ETL_REQUESTED(10001, "EngineTypeLabel are requested(需要参数 EngineTypeLabel)"), CANNOT_INSTANCE_ECE(20000, "Cannot instance EngineConnExecution(无法实例化 EngineConnExecution)"), @@ -32,7 +32,7 @@ public enum EngineconnCoreErrorCodeSummary implements LinkisErrorCode { UCL_NOT_EXISTS(20000, "UserCreatorLabel does not exist(UserCreatorLabel 不存在)"), CANNOT_HOME_PATH_EC(20001, "Cannot find the home path of engineConn(找不到 engineConn 的 home 路径)"), CANNOT_HOME_PATH_DIST( - 20001, "Cannot find the home path of engineconn dist(找不到 engineconn dist 的 home 路径)"), + 20001, "Cannot find the home path:{0} of engineconn dist(找不到 engineconn dist 的 home 路径)"), DIST_IS_EMPTY( 20001, "The dist of EngineConn is empty,engineConnType is:{0}(EngineConn 的 dist 为空,engineConnType为:{0})"), diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/conf/EnvConfiguration.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/conf/EnvConfiguration.scala index 290c6211e1..f3235ffa34 100644 
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/conf/EnvConfiguration.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/conf/EnvConfiguration.scala @@ -41,13 +41,12 @@ object EnvConfiguration { val ENGINE_CONN_CLASSPATH_FILES = CommonVars("wds.linkis.engineConn.files", "", "engineConn额外的配置文件") - val metaspaceSize = if (SystemUtils.isJavaVersionAtLeast(JavaVersion.JAVA_1_8)) { - "-XX:MaxMetaspaceSize=256m -XX:MetaspaceSize=128m" - } else { - "-XX:MaxPermSize=256m -XX:PermSize=128m" - } + val MAX_METASPACE_SIZE = CommonVars("linkis.engineconn.metaspace.size.max", "256m") - val ENGINE_CONN_DEFAULT_JAVA_OPTS = CommonVars[String]( + lazy val metaspaceSize = + s"-XX:MaxMetaspaceSize=${MAX_METASPACE_SIZE.getValue} -XX:MetaspaceSize=128m" + + lazy val ENGINE_CONN_DEFAULT_JAVA_OPTS = CommonVars[String]( "wds.linkis.engineConn.javaOpts.default", s"-XX:+UseG1GC ${metaspaceSize} " + s"-Xloggc:%s -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Dwds.linkis.server.conf=linkis-engineconn.properties -Dwds.linkis.gateway.url=${Configuration.getGateWayURL()}" diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/launch/process/JavaProcessEngineConnLaunchBuilder.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/launch/process/JavaProcessEngineConnLaunchBuilder.scala index 5271ec37e9..082b02a020 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/launch/process/JavaProcessEngineConnLaunchBuilder.scala +++ 
b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/launch/process/JavaProcessEngineConnLaunchBuilder.scala @@ -53,7 +53,7 @@ abstract class JavaProcessEngineConnLaunchBuilder this.engineConnResourceGenerator = engineConnResourceGenerator protected def getGcLogDir(engineConnBuildRequest: EngineConnBuildRequest): String = - variable(LOG_DIRS) + "/gc.log" + variable(LOG_DIRS) + "/gc" protected def getLogDir(engineConnBuildRequest: EngineConnBuildRequest): String = s" -Dlogging.file=${EnvConfiguration.LOG4J2_XML_FILE.getValue} " + diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/resource/UserNodeResource.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/resource/UserNodeResource.scala index aada8caedc..02565a394b 100644 --- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/resource/UserNodeResource.scala +++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-plugin-core/src/main/scala/org/apache/linkis/manager/engineplugin/common/resource/UserNodeResource.scala @@ -34,6 +34,9 @@ class UserNodeResource extends NodeResource { private var leftResource: Resource = _ private var createTime: Date = _ private var updateTime: Date = _ + private var maxApps: Int = _ + private var numPendingApps: Int = _ + private var numActiveApps: Int = _ def getUser: String = user @@ -87,4 +90,23 @@ class UserNodeResource extends NodeResource { override def getId: Integer = id override def setId(id: Integer): Unit = this.id = id + + override def getMaxApps: Integer = maxApps + + override def setMaxApps(maxApps: Integer): Unit = { + this.maxApps = maxApps + } + + override def getNumPendingApps: Integer 
= numPendingApps + + override def setNumPendingApps(numPendingApps: Integer): Unit = { + this.numPendingApps = numPendingApps + } + + override def getNumActiveApps: Integer = numActiveApps + + override def setNumActiveApps(numActiveApps: Integer): Unit = { + this.numActiveApps = numActiveApps + } + } diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/conf/EntranceSpringConfiguration.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/conf/EntranceSpringConfiguration.java index 0bf27a68b3..86b1a91f7a 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/conf/EntranceSpringConfiguration.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/conf/EntranceSpringConfiguration.java @@ -42,6 +42,7 @@ import org.apache.linkis.entrance.persistence.QueryPersistenceManager; import org.apache.linkis.entrance.persistence.ResultSetEngine; import org.apache.linkis.entrance.scheduler.EntranceGroupFactory; +import org.apache.linkis.entrance.scheduler.EntranceParallelConsumerManager; import org.apache.linkis.entrance.scheduler.EntranceSchedulerContext; import org.apache.linkis.orchestrator.ecm.EngineConnManagerBuilder; import org.apache.linkis.orchestrator.ecm.EngineConnManagerBuilder$; @@ -51,7 +52,6 @@ import org.apache.linkis.scheduler.executer.ExecutorManager; import org.apache.linkis.scheduler.queue.ConsumerManager; import org.apache.linkis.scheduler.queue.GroupFactory; -import org.apache.linkis.scheduler.queue.parallelqueue.ParallelConsumerManager; import org.apache.linkis.scheduler.queue.parallelqueue.ParallelScheduler; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; @@ -150,7 +150,7 @@ public EntranceInterceptor[] entranceInterceptors() { new ScalaCodeInterceptor(), new SQLLimitEntranceInterceptor(), new CommentInterceptor(), - new SetTenantLabelInterceptor(), + // 
new SetTenantLabelInterceptor(), new UserCreatorIPCheckInterceptor() }; } @@ -190,7 +190,7 @@ public GroupFactory groupFactory() { @Bean @ConditionalOnMissingBean public ConsumerManager consumerManager() { - return new ParallelConsumerManager( + return new EntranceParallelConsumerManager( ENTRANCE_SCHEDULER_MAX_PARALLELISM_USERS().getValue(), "EntranceJobScheduler"); } diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/errorcode/EntranceErrorCodeSummary.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/errorcode/EntranceErrorCodeSummary.java index 2f045a1760..f92083df0c 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/errorcode/EntranceErrorCodeSummary.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/errorcode/EntranceErrorCodeSummary.java @@ -62,6 +62,11 @@ public enum EntranceErrorCodeSummary implements LinkisErrorCode { INVALID_RESULTSETS(20053, "Invalid resultsets, cannot use cache(结果集无效,无法使用 cache)"), SUBMITTING_QUERY_FAILED(30009, "Submitting the query failed(提交查询失败)!"), + + SUBMIT_CODE_ISEMPTY( + 30010, + "Submitting the execution code, after code preprocessing, the real execution code is empty, please check the executed code(提交的执行代码,经过预处理后为空,请检查执行的代码是否为空或则只有注解)!"), + QUERY_STATUS_FAILED(50081, "Query from jobHistory status failed(从 jobHistory 状态查询失败)"), GET_QUERY_RESPONSE(50081, "Get query response incorrectly(获取查询响应结果不正确)"), QUERY_TASKID_ERROR(50081, "Query task of taskId:{0} error(查询任务id:{}的任务出错)"), diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceManager.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceManager.java index 39964fdad1..44966fc1f1 100644 --- 
a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceManager.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/persistence/QueryPersistenceManager.java @@ -23,6 +23,7 @@ import org.apache.linkis.entrance.cs.CSEntranceHelper; import org.apache.linkis.entrance.execute.EntranceJob; import org.apache.linkis.entrance.log.FlexibleErrorCodeManager; +import org.apache.linkis.governance.common.conf.GovernanceCommonConf; import org.apache.linkis.governance.common.entity.job.JobRequest; import org.apache.linkis.protocol.engine.JobProgressInfo; import org.apache.linkis.scheduler.executer.OutputExecuteResponse; @@ -169,6 +170,11 @@ private void updateJobStatus(Job job) { JobRequest jobRequest = null; if (job.isCompleted()) { job.setProgress(1); + } else if (job.getProgress() >= 1 && job instanceof EntranceJob) { + job.setProgress(GovernanceCommonConf.FAKE_PROGRESS()); + ((EntranceJob) job) + .getJobRequest() + .setProgress(String.valueOf(GovernanceCommonConf.FAKE_PROGRESS())); } try { jobRequest = this.entranceContext.getOrCreateEntranceParser().parseToJobRequest(job); diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceConsumerRestfulApi.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceConsumerRestfulApi.java new file mode 100644 index 0000000000..424e7ca170 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceConsumerRestfulApi.java @@ -0,0 +1,88 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.entrance.restful; + +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.entrance.EntranceServer; +import org.apache.linkis.scheduler.queue.ConsumerManager; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.utils.ModuleUserUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import javax.servlet.http.HttpServletRequest; + +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Api(tags = "entrance lable manager") +@RestController +@RequestMapping(path = "/entrance/operation/consumer") +public class EntranceConsumerRestfulApi { + + private EntranceServer entranceServer; + + private static final Logger logger = LoggerFactory.getLogger(EntranceConsumerRestfulApi.class); + + @Autowired + public void setEntranceServer(EntranceServer entranceServer) { + this.entranceServer = entranceServer; + } + + @ApiOperation(value = "kill-consumer", notes = "kill consumer", response = Message.class) + @RequestMapping(path = "/kill", method = RequestMethod.GET) + 
public Message killConsumer( + HttpServletRequest req, @RequestParam(value = "groupName") String groupName) { + String operationUser = ModuleUserUtils.getOperationUser(req, "kill consumer"); + if (Configuration.isNotAdmin(operationUser)) { + return Message.error("only admin can do this"); + } + logger.info("user {} to kill consumer {}", operationUser, groupName); + ConsumerManager consumerManager = + entranceServer + .getEntranceContext() + .getOrCreateScheduler() + .getSchedulerContext() + .getOrCreateConsumerManager(); + consumerManager.destroyConsumer(groupName); + logger.info("user {} finished to kill consumer {}", operationUser, groupName); + return Message.ok(); + } + + @ApiOperation(value = "consumer-info", notes = "list consumers info", response = Message.class) + @RequestMapping(path = "/info", method = RequestMethod.GET) + public Message countConsumer(HttpServletRequest req) { + String operationUser = ModuleUserUtils.getOperationUser(req, "kill consumer"); + if (Configuration.isNotAdmin(operationUser)) { + return Message.error("only admin can do this"); + } + ConsumerManager consumerManager = + entranceServer + .getEntranceContext() + .getOrCreateScheduler() + .getSchedulerContext() + .getOrCreateConsumerManager(); + return Message.ok().data("consumerNum", consumerManager.listConsumers().length); + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceLabelRestfulApi.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceLabelRestfulApi.java index 2ab457747c..8b3a466c24 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceLabelRestfulApi.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceLabelRestfulApi.java @@ -22,6 +22,7 @@ import org.apache.linkis.manager.label.constant.LabelKeyConstant; import 
org.apache.linkis.manager.label.constant.LabelValueConstant; import org.apache.linkis.protocol.label.InsLabelRefreshRequest; +import org.apache.linkis.protocol.label.InsLabelRemoveRequest; import org.apache.linkis.rpc.Sender; import org.apache.linkis.server.Message; import org.apache.linkis.server.utils.ModuleUserUtils; @@ -47,6 +48,8 @@ public class EntranceLabelRestfulApi { private static final Logger logger = LoggerFactory.getLogger(EntranceLabelRestfulApi.class); + private static Boolean offlineFlag = false; + @ApiOperation(value = "update", notes = "update route label", response = Message.class) @ApiOperationSupport(ignoreParameters = {"jsonNode"}) @RequestMapping(path = "/update", method = RequestMethod.POST) @@ -72,13 +75,41 @@ public Message updateRouteLabel(HttpServletRequest req, @RequestBody JsonNode js public Message updateRouteLabel(HttpServletRequest req) { ModuleUserUtils.getOperationUser(req, "markoffline"); Map labels = new HashMap(); - logger.info("Prepare to modify the routelabel of entry to offline"); + logger.info("Prepare to modify the routelabel of entrance to offline"); labels.put(LabelKeyConstant.ROUTE_KEY, LabelValueConstant.OFFLINE_VALUE); InsLabelRefreshRequest insLabelRefreshRequest = new InsLabelRefreshRequest(); insLabelRefreshRequest.setLabels(labels); insLabelRefreshRequest.setServiceInstance(Sender.getThisServiceInstance()); InstanceLabelClient.getInstance().refreshLabelsToInstance(insLabelRefreshRequest); + synchronized (offlineFlag) { + offlineFlag = true; + } logger.info("Finished to modify the routelabel of entry to offline"); return Message.ok(); } + + @ApiOperation( + value = "backonline", + notes = "from offline status to recover", + response = Message.class) + @RequestMapping(path = "/backonline", method = RequestMethod.GET) + public Message backOnline(HttpServletRequest req) { + ModuleUserUtils.getOperationUser(req, "backonline"); + logger.info("Prepare to modify the routelabel of entrance to remove offline"); + 
InsLabelRemoveRequest insLabelRemoveRequest = new InsLabelRemoveRequest(); + insLabelRemoveRequest.setServiceInstance(Sender.getThisServiceInstance()); + InstanceLabelClient.getInstance().removeLabelsFromInstance(insLabelRemoveRequest); + synchronized (offlineFlag) { + offlineFlag = false; + } + logger.info("Finished to backonline"); + return Message.ok(); + } + + @ApiOperation(value = "isOnline", notes = "entrance isOnline", response = Message.class) + @RequestMapping(path = "/isOnline", method = RequestMethod.GET) + public Message isOnline(HttpServletRequest req) { + logger.info("Whether Entrance is online: {}", !offlineFlag); + return Message.ok().data("isOnline", !offlineFlag); + } } diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceMetricRestfulApi.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceMetricRestfulApi.java index 7d36df8fec..7b487352d5 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceMetricRestfulApi.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceMetricRestfulApi.java @@ -20,8 +20,7 @@ import org.apache.linkis.common.conf.Configuration; import org.apache.linkis.entrance.EntranceServer; import org.apache.linkis.entrance.execute.EntranceJob; -import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; -import org.apache.linkis.manager.label.utils.LabelUtil; +import org.apache.linkis.entrance.scheduler.CreatorECTypeDefaultConf; import org.apache.linkis.server.Message; import org.apache.linkis.server.utils.ModuleUserUtils; @@ -67,7 +66,7 @@ public Message taskinfo( HttpServletRequest req, @RequestParam(value = "user", required = false) String user, @RequestParam(value = "creator", required = false) String creator, - @RequestParam(value = "engineTypeLabel", required = false) String 
engineTypeLabelValue) { + @RequestParam(value = "ecType", required = false) String ecType) { String userName = ModuleUserUtils.getOperationUser(req, "taskinfo"); String queryUser = user; if (Configuration.isNotAdmin(userName)) { @@ -83,23 +82,12 @@ public Message taskinfo( } else if (StringUtils.isBlank(creator)) { filterWords = queryUser; } - EntranceJob[] undoneTasks = entranceServer.getAllUndoneTask(filterWords); - int taskNumber = 0; + EntranceJob[] undoneTasks = entranceServer.getAllUndoneTask(filterWords, ecType); int runningNumber = 0; int queuedNumber = 0; if (null != undoneTasks) { for (EntranceJob task : undoneTasks) { - if (StringUtils.isNotBlank(engineTypeLabelValue)) { - EngineTypeLabel engineTypeLabel = - LabelUtil.getEngineTypeLabel(task.getJobRequest().getLabels()); - // Task types do not match, do not count - if (null == engineTypeLabel - || !engineTypeLabelValue.equalsIgnoreCase(engineTypeLabel.getStringValue())) { - continue; - } - } - taskNumber++; if (task.isRunning()) { runningNumber++; } else { @@ -107,17 +95,25 @@ public Message taskinfo( } } } - return Message.ok("success") - .data("taskNumber", taskNumber) - .data("runningNumber", runningNumber) - .data("queuedNumber", queuedNumber); + Message resp = + Message.ok("success") + .data("taskNumber", undoneTasks.length) + .data("runningNumber", runningNumber) + .data("queuedNumber", queuedNumber); + if (StringUtils.isNoneBlank(creator, ecType)) { + int creatorECTypeMaxRunningJobs = + CreatorECTypeDefaultConf.getCreatorECTypeMaxRunningJobs(creator, ecType); + resp.data("creatorECTypeMaxRunningJobs", creatorECTypeMaxRunningJobs); + resp.data("limitExceeded", runningNumber > creatorECTypeMaxRunningJobs); + } + return resp; } - @ApiOperation(value = "Status", notes = "get running task number ", response = Message.class) + @ApiOperation(value = "runningtask", notes = "get running task number ", response = Message.class) @RequestMapping(path = "/runningtask", method = RequestMethod.GET) - public 
Message status(HttpServletRequest req) { + public Message runningtask(HttpServletRequest req) { ModuleUserUtils.getOperationUser(req, "runningtask"); - EntranceJob[] undoneTasks = entranceServer.getAllUndoneTask(""); + EntranceJob[] undoneTasks = entranceServer.getAllUndoneTask("", null); Boolean isCompleted = false; if (null == undoneTasks || undoneTasks.length < 1) { isCompleted = true; diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java index a5d3ace35f..b616c9c658 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/restful/EntranceRestfulApi.java @@ -110,15 +110,6 @@ public Message execute(HttpServletRequest req, @RequestBody Map JobRequest jobReq = ((EntranceJob) job).getJobRequest(); Long jobReqId = jobReq.getId(); ModuleUserUtils.getOperationUser(req, "execute task,id: " + jobReqId); - pushLog( - LogUtils.generateInfo( - "You have submitted a new job, script code (after variable substitution) is"), - job); - pushLog( - "************************************SCRIPT CODE************************************", job); - pushLog(jobReq.getExecutionCode(), job); - pushLog( - "************************************SCRIPT CODE************************************", job); String execID = ZuulEntranceUtils.generateExecID( job.getId(), @@ -162,15 +153,6 @@ public Message submit(HttpServletRequest req, @RequestBody Map j JobRequest jobRequest = ((EntranceJob) job).getJobRequest(); Long jobReqId = jobRequest.getId(); ModuleUserUtils.getOperationUser(req, "submit jobReqId: " + jobReqId); - pushLog( - LogUtils.generateInfo( - "You have submitted a new job, script code (after variable substitution) is"), - job); - 
pushLog( - "************************************SCRIPT CODE************************************", job); - pushLog(jobRequest.getExecutionCode(), job); - pushLog( - "************************************SCRIPT CODE************************************", job); pushLog( LogUtils.generateInfo( "Your job is accepted, jobID is " @@ -594,9 +576,9 @@ public Message killJobs( logger.error("kill job {} failed ", job.get().getId(), t); message = Message.error( - "An exception occurred while killing the job, kill failed(kill job的时候出现了异常,kill失败)"); + "An exception occurred while killing the job, kill failed(kill job的时候出现了异常,kill失败)", + t); message.setMethod("/api/entrance/" + id + "/kill"); - message.setStatus(1); } } messages.add(message); @@ -678,7 +660,9 @@ public Message kill( logger.error("kill job {} failed ", job.get().getId(), t); message = Message.error( - "An exception occurred while killing the job, kill failed(kill job的时候出现了异常,kill失败)"); + "An exception occurred while killing the job, kill failed(kill job的时候出现了异常,kill失败) with error:" + + t.getMessage(), + t); message.setMethod("/api/entrance/" + id + "/kill"); message.setStatus(1); } diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/CreatorECTypeDefaultConf.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/CreatorECTypeDefaultConf.java new file mode 100644 index 0000000000..5a91c71a11 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/CreatorECTypeDefaultConf.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.entrance.scheduler; + +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.entrance.conf.EntranceConfiguration; +import org.apache.linkis.entrance.utils.EntranceUtils; +import org.apache.linkis.governance.common.protocol.conf.RequestQueryEngineConfig; +import org.apache.linkis.governance.common.protocol.conf.ResponseQueryConfig; +import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; +import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel; +import org.apache.linkis.rpc.Sender; + +import org.apache.commons.lang3.StringUtils; + +import java.util.concurrent.TimeUnit; + +import scala.Tuple2; + +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.CacheLoader; +import com.google.common.cache.LoadingCache; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class CreatorECTypeDefaultConf { + + private static final Logger logger = LoggerFactory.getLogger(CreatorECTypeDefaultConf.class); + + public static Sender confSender = + Sender.getSender( + Configuration.CLOUD_CONSOLE_CONFIGURATION_SPRING_APPLICATION_NAME().getValue()); + + private static LoadingCache confCache = + CacheBuilder.newBuilder() + .maximumSize(1000) + .expireAfterWrite( + (long) EntranceConfiguration.ENTRANCE_CREATOR_JOB_LIMIT_CONF_CACHE().getValue(), + TimeUnit.MINUTES) + .build( + new CacheLoader() 
{ + @Override + public Integer load(String key) throws Exception { + Tuple2 tuple2 = + EntranceUtils.fromKeyGetLabels(key); + RequestQueryEngineConfig requestQueryEngineConfig = + new RequestQueryEngineConfig(tuple2._1, tuple2._2(), null); + int jobLimit = + (int) EntranceConfiguration.ENTRANCE_CREATOR_JOB_LIMIT().getValue(); + try { + Object response = confSender.ask(requestQueryEngineConfig); + if (response instanceof ResponseQueryConfig) { + jobLimit = + (int) + EntranceConfiguration.ENTRANCE_CREATOR_JOB_LIMIT() + .getValue(((ResponseQueryConfig) response).getKeyAndValue()); + } + } catch (Exception e) { + logger.warn("Failed to get key {} from conf", key, e); + } + return jobLimit; + } + }); + + public static int getCreatorECTypeMaxRunningJobs(String creator, String ecType) { + int jobLimit = (int) EntranceConfiguration.ENTRANCE_CREATOR_JOB_LIMIT().getValue(); + if (StringUtils.isNoneBlank(creator, ecType)) { + try { + String key = EntranceUtils.getDefaultCreatorECTypeKey(creator, ecType); + jobLimit = confCache.get(key); + } catch (Exception e) { + logger.warn("Failed to get key creator {} ecType {} from cache", creator, ecType, e); + } + } + int entranceNumber = EntranceUtils.getRunningEntranceNumber(); + return jobLimit / entranceNumber; + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/EntranceFIFOUserConsumer.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/EntranceFIFOUserConsumer.java new file mode 100644 index 0000000000..387606256a --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/EntranceFIFOUserConsumer.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.entrance.scheduler; + +import org.apache.linkis.scheduler.SchedulerContext; +import org.apache.linkis.scheduler.queue.Consumer; +import org.apache.linkis.scheduler.queue.Group; +import org.apache.linkis.scheduler.queue.fifoqueue.FIFOUserConsumer; + +import java.util.concurrent.ExecutorService; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class EntranceFIFOUserConsumer extends FIFOUserConsumer { + + private static final Logger logger = LoggerFactory.getLogger(EntranceFIFOUserConsumer.class); + + public EntranceFIFOUserConsumer( + SchedulerContext schedulerContext, ExecutorService executeService, Group group) { + super(schedulerContext, executeService, group); + } + + @Override + public boolean runScheduleIntercept() { + Consumer[] consumers = getSchedulerContext().getOrCreateConsumerManager().listConsumers(); + int creatorRunningJobNum = 0; + String[] groupNames = getGroup().getGroupName().split("_"); + if (groupNames.length < 3) { + return true; + } + String creatorName = groupNames[0]; + String ecType = groupNames[2]; + for (Consumer consumer : consumers) { + String groupName = consumer.getGroup().getGroupName(); + if (groupName.startsWith(creatorName) && groupName.endsWith(ecType)) { + creatorRunningJobNum += consumer.getRunningEvents().length; + } + } + int creatorECTypeMaxRunningJobs = + 
CreatorECTypeDefaultConf.getCreatorECTypeMaxRunningJobs(creatorName, ecType); + if (creatorRunningJobNum > creatorECTypeMaxRunningJobs) { + logger.error( + "Creator: {} EC: {} there are currently {} jobs running that exceed the maximum limit: {}", + creatorName, + ecType, + creatorRunningJobNum, + creatorECTypeMaxRunningJobs); + return false; + } else { + return true; + } + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/EntranceParallelConsumerManager.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/EntranceParallelConsumerManager.java new file mode 100644 index 0000000000..98f0929ee9 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/scheduler/EntranceParallelConsumerManager.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.scheduler; + +import org.apache.linkis.scheduler.queue.Group; +import org.apache.linkis.scheduler.queue.fifoqueue.FIFOUserConsumer; +import org.apache.linkis.scheduler.queue.parallelqueue.ParallelConsumerManager; + +public class EntranceParallelConsumerManager extends ParallelConsumerManager { + + public EntranceParallelConsumerManager(int maxParallelismUsers, String schedulerName) { + super(maxParallelismUsers, schedulerName); + } + + @Override + public FIFOUserConsumer createConsumer(String groupName) { + Group group = getSchedulerContext().getOrCreateGroupFactory().getGroup(groupName); + return new EntranceFIFOUserConsumer(getSchedulerContext(), getOrCreateExecutorService(), group); + } +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/server/DefaultEntranceServer.java b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/server/DefaultEntranceServer.java index 4301334965..fac85f32c1 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/server/DefaultEntranceServer.java +++ b/linkis-computation-governance/linkis-entrance/src/main/java/org/apache/linkis/entrance/server/DefaultEntranceServer.java @@ -94,11 +94,12 @@ private void shutdownEntrance(ContextClosedEvent event) { logger.warn("event has been handled"); } else { logger.warn("Entrance exit to stop all job"); - EntranceJob[] allUndoneJobs = getAllUndoneTask(null); + EntranceJob[] allUndoneJobs = getAllUndoneTask(null, null); if (null != allUndoneJobs) { for (EntranceJob job : allUndoneJobs) { job.onFailure( - "Entrance exits the automatic cleanup task and can be rerun(服务退出自动清理任务,可以重跑)", null); + "Your job will be marked as canceled because the Entrance service restarted(因为Entrance服务重启,您的任务将被标记为取消)", + null); } } } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceServer.scala 
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceServer.scala index f298e54251..eed2929c23 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceServer.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceServer.scala @@ -20,6 +20,11 @@ package org.apache.linkis.entrance import org.apache.linkis.common.exception.{ErrorException, LinkisException, LinkisRuntimeException} import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.conf.EntranceConfiguration.{ + ENABLE_JOB_TIMEOUT_CHECK, + ENTRANCE_TASK_TIMEOUT +} import org.apache.linkis.entrance.cs.CSEntranceHelper import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ import org.apache.linkis.entrance.exception.{EntranceErrorException, SubmitFailedException} @@ -28,6 +33,7 @@ import org.apache.linkis.entrance.log.LogReader import org.apache.linkis.entrance.timeout.JobTimeoutManager import org.apache.linkis.entrance.utils.JobHistoryHelper import org.apache.linkis.governance.common.entity.job.JobRequest +import org.apache.linkis.governance.common.utils.LoggerUtils import org.apache.linkis.protocol.constants.TaskConstant import org.apache.linkis.rpc.Sender import org.apache.linkis.scheduler.queue.{Job, SchedulerEventState} @@ -38,6 +44,7 @@ import org.apache.commons.lang3.exception.ExceptionUtils import java.text.MessageFormat import java.util +import java.util.concurrent.TimeUnit abstract class EntranceServer extends Logging { @@ -45,6 +52,8 @@ abstract class EntranceServer extends Logging { private val jobTimeoutManager: JobTimeoutManager = new JobTimeoutManager() + private val timeoutCheck = EntranceConfiguration.ENABLE_JOB_TIMEOUT_CHECK.getValue + def init(): Unit def getName: String @@ -78,12 +87,15 @@ 
abstract class EntranceServer extends Logging { } logger.info(s"received a request,convert $jobRequest") + LoggerUtils.setJobIdMDC(jobRequest.getId.toString) + val logAppender = new java.lang.StringBuilder() Utils.tryThrow( getEntranceContext .getOrCreateEntranceInterceptors() .foreach(int => jobRequest = int.apply(jobRequest, logAppender)) ) { t => + LoggerUtils.removeJobIdMDC() val error = t match { case error: ErrorException => error case t1: Throwable => @@ -150,6 +162,18 @@ abstract class EntranceServer extends Logging { * this to trigger JobListener.onJobinit() */ Utils.tryAndWarn(job.getJobListener.foreach(_.onJobInited(job))) + if (logger.isDebugEnabled()) { + logger.debug( + s"After code preprocessing, the real execution code is:${jobRequest.getExecutionCode}" + ) + } + if (StringUtils.isBlank(jobRequest.getExecutionCode)) { + throw new SubmitFailedException( + SUBMIT_CODE_ISEMPTY.getErrorCode, + SUBMIT_CODE_ISEMPTY.getErrorDesc + ) + } + getEntranceContext.getOrCreateScheduler().submit(job) val msg = LogUtils.generateInfo( s"Job with jobId : ${jobRequest.getId} and execID : ${job.getId()} submitted " @@ -159,14 +183,16 @@ abstract class EntranceServer extends Logging { job match { case entranceJob: EntranceJob => entranceJob.getJobRequest.setReqId(job.getId()) - if (jobTimeoutManager.timeoutCheck && JobTimeoutManager.hasTimeoutLabel(entranceJob)) { + if (timeoutCheck && JobTimeoutManager.hasTimeoutLabel(entranceJob)) { jobTimeoutManager.add(job.getId(), entranceJob) } entranceJob.getLogListener.foreach(_.onLogUpdate(entranceJob, msg)) case _ => } + LoggerUtils.removeJobIdMDC() job } { t => + LoggerUtils.removeJobIdMDC() job.onFailure("Submitting the query failed!(提交查询失败!)", t) val _jobRequest: JobRequest = getEntranceContext.getOrCreateEntranceParser().parseToJobRequest(job) @@ -206,7 +232,7 @@ abstract class EntranceServer extends Logging { entranceWebSocketService } else None - def getAllUndoneTask(filterWords: String): Array[EntranceJob] = { + def 
getAllUndoneTask(filterWords: String, ecType: String = null): Array[EntranceJob] = { val consumers = getEntranceContext .getOrCreateScheduler() .getSchedulerContext @@ -214,7 +240,14 @@ abstract class EntranceServer extends Logging { .listConsumers() .toSet val filterConsumer = if (StringUtils.isNotBlank(filterWords)) { - consumers.filter(_.getGroup.getGroupName.contains(filterWords)) + if (StringUtils.isNotBlank(ecType)) { + consumers.filter(consumer => + consumer.getGroup.getGroupName.contains(filterWords) && consumer.getGroup.getGroupName + .contains(ecType) + ) + } else { + consumers.filter(_.getGroup.getGroupName.contains(filterWords)) + } } else { consumers } @@ -227,6 +260,40 @@ abstract class EntranceServer extends Logging { .toArray } + /** + * to check timeout task, and kill timeout task timeout: default > 48h + */ + def startTimeOutCheck(): Unit = { + Utils.defaultScheduler.scheduleAtFixedRate( + new Runnable() { + override def run(): Unit = { + Utils.tryCatch { + + val timeoutType = EntranceConfiguration.ENTRANCE_TASK_TIMEOUT.getHotValue() + logger.info(s"Start to check timeout Job, timeout is ${timeoutType}") + val timeoutTime = System.currentTimeMillis() - timeoutType.toLong + getAllUndoneTask(null, null).filter(job => job.createTime < timeoutTime).foreach { + job => + job.onFailure(s"Job has run for longer than the maximum time $timeoutType", null) + } + logger.info(s"Finished to check timeout Job, timeout is ${timeoutType}") + } { case t: Throwable => + logger.warn(s"TimeoutDetective Job failed. 
${t.getMessage}", t) + } + } + + }, + EntranceConfiguration.ENTRANCE_TASK_TIMEOUT_SCAN.getValue.toLong, + EntranceConfiguration.ENTRANCE_TASK_TIMEOUT_SCAN.getValue.toLong, + TimeUnit.MILLISECONDS + ) + } + + if (timeoutCheck) { + logger.info("Job time check is enabled") + startTimeOutCheck() + } + } object EntranceServer { diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceWebSocketService.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceWebSocketService.scala index 714b9f0cc2..b5339c9e2e 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceWebSocketService.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/EntranceWebSocketService.scala @@ -215,18 +215,6 @@ class EntranceWebSocketService s"Your job's execution code is (after variable substitution and code check) " ) ) - entranceServer.getEntranceContext - .getOrCreateLogManager() - .onLogUpdate( - job, - "************************************SCRIPT CODE************************************" - ) - entranceServer.getEntranceContext - .getOrCreateLogManager() - .onLogUpdate( - job, - "************************************SCRIPT CODE************************************" - ) entranceServer.getEntranceContext .getOrCreateLogManager() .onLogUpdate( diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/cache/GlobalConfigurationKeyValueCache.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/cache/GlobalConfigurationKeyValueCache.scala deleted file mode 100644 index d4190d6ca4..0000000000 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/cache/GlobalConfigurationKeyValueCache.scala +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one 
or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.entrance.cache - -import org.apache.linkis.common.conf.Configuration -import org.apache.linkis.governance.common.entity.job.JobRequest -import org.apache.linkis.governance.common.protocol.conf.{ - RequestQueryGlobalConfig, - ResponseQueryConfig -} -import org.apache.linkis.protocol.CacheableProtocol -import org.apache.linkis.rpc.RPCMapCache - -import java.util - -object GlobalConfigurationKeyValueCache - extends RPCMapCache[JobRequest, String, String]( - Configuration.CLOUD_CONSOLE_CONFIGURATION_SPRING_APPLICATION_NAME.getValue - ) { - - override protected def createRequest(jobReq: JobRequest): CacheableProtocol = - RequestQueryGlobalConfig(jobReq.getExecuteUser) - - override protected def createMap(any: Any): util.Map[String, String] = any match { - case response: ResponseQueryConfig => response.getKeyAndValue - } - -} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala index 5c61ce0b3b..4d9e895de6 100644 --- 
a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/conf/EntranceConfiguration.scala @@ -220,7 +220,26 @@ object EntranceConfiguration { val CREATOR_IP_SWITCH = CommonVars("wds.linkis.entrance.user.creator.ip.interceptor.switch", false) + val TEMPLATE_CONF_SWITCH = + CommonVars("wds.linkis.entrance.template.conf.interceptor.switch", true) + val ENABLE_ENTRANCE_DIRTY_DATA_CLEAR = CommonVars("linkis.entrance.auto.clean.dirty.data.enable", true) + val ENTRANCE_CREATOR_JOB_LIMIT: CommonVars[Int] = + CommonVars[Int]( + "linkis.entrance.creator.job.concurrency.limit", + 10000, + "Creator task concurrency limit parameters" + ) + + val ENTRANCE_CREATOR_JOB_LIMIT_CONF_CACHE = + CommonVars("linkis.entrance.creator.job.concurrency.limit.conf.cache.time", 30L) + + val ENTRANCE_TASK_TIMEOUT = + CommonVars("wds.linkis.entrance.task.timeout", new TimeType("48h")) + + val ENTRANCE_TASK_TIMEOUT_SCAN = + CommonVars("wds.linkis.entrance.task.timeout.scan", new TimeType("12h")) + } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/DefaultEntranceExecutor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/DefaultEntranceExecutor.scala index c509f10056..266de6eb5b 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/DefaultEntranceExecutor.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/DefaultEntranceExecutor.scala @@ -25,6 +25,7 @@ import org.apache.linkis.entrance.orchestrator.EntranceOrchestrationFactory import org.apache.linkis.entrance.utils.JobHistoryHelper import org.apache.linkis.governance.common.entity.ExecutionNodeStatus import 
org.apache.linkis.governance.common.protocol.task.ResponseTaskStatus +import org.apache.linkis.governance.common.utils.LoggerUtils import org.apache.linkis.manager.label.entity.Label import org.apache.linkis.manager.label.entity.engine.CodeLanguageLabel import org.apache.linkis.manager.label.utils.LabelUtil @@ -106,6 +107,7 @@ class DefaultEntranceExecutor(id: Long) entranceExecuteRequest: EntranceExecuteRequest, orchestration: Orchestration ): Unit = { + LoggerUtils.setJobIdMDC(getId.toString) orchestrationResponse match { case succeedResponse: SucceedTaskResponse => succeedResponse match { @@ -184,6 +186,7 @@ class DefaultEntranceExecutor(id: Long) _.onLogUpdate(entranceExecuteRequest.getJob, LogUtils.generateERROR(msg)) ) } + LoggerUtils.removeJobIdMDC() } def requestToComputationJobReq(entranceExecuteRequest: EntranceExecuteRequest): JobReq = { @@ -238,11 +241,13 @@ class DefaultEntranceExecutor(id: Long) } override def kill(): Boolean = { + LoggerUtils.setJobIdMDC(getId.toString) logger.info("Entrance start to kill job {} invoke Orchestrator ", this.getId) Utils.tryAndWarn { val msg = s"You job with id was cancelled by user!" 
getRunningOrchestrationFuture.foreach(_.cancel(msg)) } + LoggerUtils.removeJobIdMDC() true } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutor.scala index 44cb3620ce..be7fb13871 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutor.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutor.scala @@ -19,9 +19,11 @@ package org.apache.linkis.entrance.execute import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.entrance.exception.{EntranceErrorCode, EntranceErrorException} import org.apache.linkis.entrance.job.EntranceExecuteRequest import org.apache.linkis.governance.common.entity.ExecutionNodeStatus._ import org.apache.linkis.governance.common.protocol.task.{RequestTask, ResponseTaskStatus} +import org.apache.linkis.governance.common.utils.LoggerUtils import org.apache.linkis.orchestrator.computation.operation.log.LogProcessor import org.apache.linkis.orchestrator.computation.operation.progress.ProgressProcessor import org.apache.linkis.orchestrator.computation.operation.resource.ResourceReportProcessor @@ -29,6 +31,7 @@ import org.apache.linkis.orchestrator.core.OrchestrationFuture import org.apache.linkis.protocol.UserWithCreator import org.apache.linkis.scheduler.executer._ import org.apache.linkis.scheduler.executer.ExecutorState.ExecutorState +import org.apache.linkis.server.BDPJettyServerHelper import org.apache.commons.io.IOUtils import org.apache.commons.lang3.StringUtils @@ -69,9 +72,14 @@ abstract class EntranceExecutor(val id: Long) extends Executor with Logging { } override def execute(executeRequest: ExecuteRequest): ExecuteResponse = { - var request: 
RequestTask = null - interceptors.foreach(in => request = in.apply(request, executeRequest)) - callExecute(executeRequest) + LoggerUtils.setJobIdMDC(getId.toString) + Utils.tryFinally { + var request: RequestTask = null + interceptors.foreach(in => request = in.apply(request, executeRequest)) + callExecute(executeRequest) + } { + LoggerUtils.removeJobIdMDC() + } } protected def callback(): Unit = {} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutorManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutorManager.scala index 0d5d605983..05bc5311b0 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutorManager.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/execute/EntranceExecutorManager.scala @@ -90,7 +90,7 @@ abstract class EntranceExecutorManager(groupFactory: GroupFactory) job.getJobRequest match { case jobReq: JobRequest => val entranceEntranceExecutor = - new DefaultEntranceExecutor(idGenerator.incrementAndGet()) + new DefaultEntranceExecutor(jobReq.getId) // getEngineConn Executor job.getLogListener.foreach( _.onLogUpdate( diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CommentInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CommentInterceptor.scala index bbb904c6a7..69b96cfe24 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CommentInterceptor.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/CommentInterceptor.scala @@ -103,6 +103,22 @@ object ScalaCommentHelper extends CommentHelper { private val scalaCommentPattern: 
String = "(?ms)([\"'](?:|[^'])*['\"])|//.*?$|/\\*.*?\\*/" override def dealComment(code: String): String = code +// private val logger: Logger = LoggerFactory.getLogger(getClass) +// +// override def dealComment(code: String): String = { +// try { +// val p = Pattern.compile(scalaCommentPattern) +// val sql = p.matcher(code).replaceAll("$1") +// sql +// } catch { +// case e: Exception => +// logger.warn("scala comment failed") +// code +// case t: Throwable => +// logger.warn("scala comment failed") +// code +// } +// } } object CommentMain { diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/Explain.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/Explain.scala index 1914730d51..1f402c907d 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/Explain.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/Explain.scala @@ -133,6 +133,8 @@ object SQLExplain extends Explain { .generateWarn("please pay attention ,SQL full export mode opened(请注意,SQL全量导出模式打开)\n") ) } + var isFirstTimePrintingLimit = true + var isFirstTimePrintingOverLimit = true if (tempCode.contains("""\;""")) { val semicolonIndexes = findRealSemicolonIndex(tempCode) var oldIndex = 0 @@ -142,21 +144,27 @@ object SQLExplain extends Explain { if (isSelectCmd(singleCode)) { val trimCode = singleCode.trim if (isSelectCmdNoLimit(trimCode) && !isNoLimitAllowed) { - logAppender.append( - LogUtils.generateWarn( - s"You submitted a sql without limit, DSS will add limit 5000 to your sql" - ) + "\n" - ) + if (isFirstTimePrintingLimit) { + logAppender.append( + LogUtils.generateWarn( + s"You submitted a sql without limit, DSS will add limit 5000 to your sql" + ) + "\n" + ) + isFirstTimePrintingLimit = false + } // 将注释先干掉,然后再进行添加limit val realCode = 
cleanComment(trimCode) fixedCode += (realCode + SQL_APPEND_LIMIT) } else if (isSelectOverLimit(singleCode) && !isNoLimitAllowed) { val trimCode = singleCode.trim - logAppender.append( - LogUtils.generateWarn( - s"You submitted a sql with limit exceeding 5000, it is not allowed. DSS will change your limit to 5000" - ) + "\n" - ) + if (isFirstTimePrintingOverLimit) { + logAppender.append( + LogUtils.generateWarn( + s"You submitted a sql with limit exceeding 5000, it is not allowed. DSS will change your limit to 5000" + ) + "\n" + ) + isFirstTimePrintingOverLimit = false + } fixedCode += repairSelectOverLimit(trimCode) } else { fixedCode += singleCode.trim @@ -170,21 +178,27 @@ object SQLExplain extends Explain { if (isSelectCmd(singleCode)) { val trimCode = singleCode.trim if (isSelectCmdNoLimit(trimCode) && !isNoLimitAllowed) { - logAppender.append( - LogUtils.generateWarn( - s"You submitted a sql without limit, DSS will add limit 5000 to your sql" - ) + "\n" - ) + if (isFirstTimePrintingLimit) { + logAppender.append( + LogUtils.generateWarn( + s"You submitted a sql without limit, DSS will add limit 5000 to your sql" + ) + "\n" + ) + isFirstTimePrintingLimit = false + } // 将注释先干掉,然后再进行添加limit val realCode = cleanComment(trimCode) fixedCode += (realCode + SQL_APPEND_LIMIT) } else if (isSelectOverLimit(singleCode) && !isNoLimitAllowed) { val trimCode = singleCode.trim - logAppender.append( - LogUtils.generateWarn( - s"You submitted a sql with limit exceeding 5000, it is not allowed. DSS will change your limit to 5000" - ) + "\n" - ) + if (isFirstTimePrintingOverLimit) { + logAppender.append( + LogUtils.generateWarn( + s"You submitted a sql with limit exceeding 5000, it is not allowed. 
DSS will change your limit to 5000" + ) + "\n" + ) + isFirstTimePrintingOverLimit = false + } fixedCode += repairSelectOverLimit(trimCode) } else { fixedCode += singleCode.trim diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/listener/TriggerEventListener.java b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfInterceptor.scala similarity index 58% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/listener/TriggerEventListener.java rename to linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfInterceptor.scala index 8811441d02..e59557efa0 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/observer/listener/TriggerEventListener.java +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfInterceptor.scala @@ -15,21 +15,22 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.application.observer.listener; +package org.apache.linkis.entrance.interceptor.impl -import org.apache.linkis.cli.application.observer.event.LinkisClientEvent; +import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.interceptor.EntranceInterceptor +import org.apache.linkis.governance.common.entity.job.JobRequest -import java.util.concurrent.atomic.AtomicBoolean; +import java.lang -public class TriggerEventListener implements LinkisClientListener { - AtomicBoolean atomicFlag = new AtomicBoolean(false); +class TemplateConfInterceptor extends EntranceInterceptor { - @Override - public void update(LinkisClientEvent event, Object msg) { - atomicFlag.compareAndSet(false, true); + override def apply(jobRequest: JobRequest, logAppender: lang.StringBuilder): JobRequest = { + if (EntranceConfiguration.TEMPLATE_CONF_SWITCH.getValue) { + TemplateConfUtils.dealWithStartParams(jobRequest, logAppender) + } else { + jobRequest + } } - public Boolean isTriggered() { - return atomicFlag.get(); - } } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfUtils.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfUtils.scala new file mode 100644 index 0000000000..a3a7a3f317 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TemplateConfUtils.scala @@ -0,0 +1,113 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.entrance.interceptor.impl + +import org.apache.linkis.common.conf.Configuration +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.governance.common.entity.TemplateConfKey +import org.apache.linkis.governance.common.entity.job.JobRequest +import org.apache.linkis.governance.common.protocol.conf.{TemplateConfRequest, TemplateConfResponse} +import org.apache.linkis.manager.label.constant.LabelKeyConstant +import org.apache.linkis.protocol.utils.TaskUtils +import org.apache.linkis.rpc.Sender + +import org.apache.commons.lang3.StringUtils + +import java.{lang, util} +import java.util.concurrent.TimeUnit + +import scala.collection.JavaConverters._ + +import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache} + +object TemplateConfUtils extends Logging { + + private val templateCache: LoadingCache[String, util.List[TemplateConfKey]] = CacheBuilder + .newBuilder() + .maximumSize(1000) + .expireAfterWrite(5, TimeUnit.MINUTES) + .build(new CacheLoader[String, util.List[TemplateConfKey]]() { + + override def load(templateUuid: String): util.List[TemplateConfKey] = { + var templateList = Utils.tryAndWarn { + val sender: Sender = Sender + .getSender(Configuration.CLOUD_CONSOLE_CONFIGURATION_SPRING_APPLICATION_NAME.getValue) + + logger.info(s"load template configuration data templateUuid:$templateUuid") + val res = sender.ask(new TemplateConfRequest(templateUuid)) match { + case response: TemplateConfResponse => + logger + .debug(s"${response.getList()}") + response.getList + case _ => + 
logger + .warn(s"load template configuration data templateUuid:$templateUuid loading failed") + new util.ArrayList[TemplateConfKey](0) + } + res + } + if (templateList.size() == 0) { + logger.warn(s"template configuration data loading failed, please check warn log") + } + templateList + } + + }) + + def dealWithStartParams(jobRequest: JobRequest, logAppender: lang.StringBuilder): JobRequest = { + jobRequest match { + case requestPersistTask: JobRequest => + val params = requestPersistTask.getParams + val startMap = TaskUtils.getStartupMap(params) + logger.info("jobRequest startMap params :{} ", startMap) + val templateUuid = startMap.getOrDefault(LabelKeyConstant.TEMPLATE_CONF_KEY, "").toString + if (StringUtils.isBlank(templateUuid)) { + logger.debug("jobRequest startMap param template id is empty") + } else { + logger.info("try to get template conf list with templateUid:{} ", templateUuid) + logAppender.append(s"try to get template conf list with templateUid:$templateUuid") + val templateConflist = templateCache.get(templateUuid) + if (templateConflist != null && templateConflist.size() > 0) { + val keyList = new util.HashMap[String, AnyRef]() + templateConflist.asScala.foreach(ele => { + val key = ele.getKey + val oldValue = startMap.get(key) + if (oldValue != null && StringUtils.isNotBlank(oldValue.toString)) { + logger.info(s"key:$key value:$oldValue not empty, skip to deal") + } else { + val newValue = ele.getConfigValue + logger.info(s"key:$key value:$newValue will add to startMap params") + if (TaskUtils.isWithDebugInfo(params)) { + logAppender.append(s"add $key=$newValue\n") + } + keyList.put(key, newValue) + } + + }) + if (keyList.size() > 0) { + TaskUtils.addStartupMap(params, keyList) + } + } + + } + case _ => + } + jobRequest + } + +} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TenantLabelSetUtils.scala 
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TenantLabelSetUtils.scala index 1deee62a72..49f44edbae 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TenantLabelSetUtils.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/TenantLabelSetUtils.scala @@ -95,6 +95,11 @@ object TenantLabelSetUtils extends Logging { "*-" + LabelUtil.getUserCreatorLabel(jobRequest.getLabels).getCreator.toLowerCase() ) } + if (StringUtils.isBlank(tenant)) { + tenant = userCreatorTenantCache.get( + LabelUtil.getUserCreatorLabel(jobRequest.getLabels).getUser.toLowerCase() + "-*" + ) + } logger.info("get cache tenant:" + tenant + ",jobRequest:" + jobRequest.getId) // Add cached data if it is not empty if (StringUtils.isNotBlank(tenant)) { diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/VarSubstitutionInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/VarSubstitutionInterceptor.scala index 0487a238cf..72d40305a6 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/VarSubstitutionInterceptor.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/interceptor/impl/VarSubstitutionInterceptor.scala @@ -41,10 +41,26 @@ class VarSubstitutionInterceptor extends EntranceInterceptor { LogUtils.generateInfo("Program is substituting variables for you") + "\n" ) val codeType = LabelUtil.getCodeType(jobRequest.getLabels) - jobRequest.setExecutionCode(CustomVariableUtils.replaceCustomVar(jobRequest, codeType)) + val realCode = CustomVariableUtils.replaceCustomVar(jobRequest, codeType) + jobRequest.setExecutionCode(realCode) logAppender.append( 
LogUtils.generateInfo("Variables substitution ended successfully") + "\n" ) + // print code after variables substitution + logAppender.append( + LogUtils.generateInfo( + "You have submitted a new job, script code (after variable substitution) is" + ) + "\n" + ); + logAppender.append( + "************************************SCRIPT CODE************************************" + "\n" + ) + logAppender.append(realCode); + logAppender.append("\n"); + logAppender.append( + "************************************SCRIPT CODE************************************" + "\n" + ); + jobRequest } { case e: VarSubstitutionException => diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogWriter.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogWriter.scala index 9028c469ab..b54dc757cd 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogWriter.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/CacheLogWriter.scala @@ -41,6 +41,8 @@ class CacheLogWriter(logPath: String, charset: String, sharedCache: Cache, user: val sb = new StringBuilder if (removed != null) sb.append(removed).append("\n") logs.filter(_ != null).foreach(log => sb.append(log).append("\n")) + // need append latest msg before clear + sb.append(msg).append("\n") sharedCache.cachedLogs.fakeClear() super.write(sb.toString()) pushTime.setTime( diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/HDFSCacheLogWriter.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/HDFSCacheLogWriter.scala index 24633dfbb2..5ac90add66 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/HDFSCacheLogWriter.scala +++ 
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/HDFSCacheLogWriter.scala @@ -99,6 +99,8 @@ class HDFSCacheLogWriter(logPath: String, charset: String, sharedCache: Cache, u val sb = new StringBuilder if (removed != null) sb.append(removed).append("\n") logs.filter(_ != null).foreach(log => sb.append(log).append("\n")) + // need append latest msg before fake clear + sb.append(msg).append("\n") sharedCache.cachedLogs.fakeClear() writeToFile(sb.toString()) pushTime.setTime( diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LoopArray.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LoopArray.scala index 155d8c7bd5..5b62a49aa1 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LoopArray.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/log/LoopArray.scala @@ -51,7 +51,7 @@ class LoopArray[T](maxCapacity: Int) { } else if (index > _max) { throw new IllegalArgumentException("The index " + index + " must be less than " + _max) } - val _index = (flag + (index - realSize)) % maxCapacity + val _index = (flag + (index - realSize + maxCapacity - 1)) % maxCapacity eventQueue(_index).asInstanceOf[T] } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/orchestrator/plugin/EntranceUserParallelOrchestratorPlugin.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/orchestrator/plugin/EntranceUserParallelOrchestratorPlugin.scala index 4b9b4570f1..e5c657023e 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/orchestrator/plugin/EntranceUserParallelOrchestratorPlugin.scala +++ 
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/orchestrator/plugin/EntranceUserParallelOrchestratorPlugin.scala @@ -20,19 +20,15 @@ package org.apache.linkis.entrance.orchestrator.plugin import org.apache.linkis.common.conf.Configuration import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.entrance.conf.EntranceConfiguration +import org.apache.linkis.entrance.utils.EntranceUtils import org.apache.linkis.governance.common.protocol.conf.{ RequestQueryEngineConfigWithGlobalConfig, ResponseQueryConfig } -import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext -import org.apache.linkis.manager.label.constant.LabelKeyConstant import org.apache.linkis.manager.label.entity.Label import org.apache.linkis.manager.label.entity.engine.{EngineTypeLabel, UserCreatorLabel} import org.apache.linkis.orchestrator.plugin.UserParallelOrchestratorPlugin import org.apache.linkis.rpc.Sender -import org.apache.linkis.server.BDPJettyServerHelper - -import org.apache.commons.lang3.StringUtils import java.util import java.util.concurrent.TimeUnit @@ -43,10 +39,6 @@ import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache} class EntranceUserParallelOrchestratorPlugin extends UserParallelOrchestratorPlugin with Logging { - private val SPLIT = "," - - private val labelFactory = LabelBuilderFactoryContext.getLabelBuilderFactory - private def getDefaultMaxRuningNum: Int = { EntranceConfiguration.WDS_LINKIS_INSTANCE.getHotValue() } @@ -62,7 +54,7 @@ class EntranceUserParallelOrchestratorPlugin extends UserParallelOrchestratorPlu .build(new CacheLoader[String, Integer]() { override def load(key: String): Integer = { - val (userCreatorLabel, engineTypeLabel) = fromKeyGetLabels(key) + val (userCreatorLabel, engineTypeLabel) = EntranceUtils.fromKeyGetLabels(key) val keyAndValue = Utils.tryAndWarnMsg { sender .ask(RequestQueryEngineConfigWithGlobalConfig(userCreatorLabel, engineTypeLabel)) @@ 
-75,10 +67,8 @@ class EntranceUserParallelOrchestratorPlugin extends UserParallelOrchestratorPlu null == keyAndValue || !keyAndValue .containsKey(EntranceConfiguration.WDS_LINKIS_INSTANCE.key) ) { - logger.error( - s"cannot found user configuration key:${EntranceConfiguration.WDS_LINKIS_INSTANCE.key}," + - s"will use default value ${EntranceConfiguration.WDS_LINKIS_INSTANCE.getHotValue()}。All config map: ${BDPJettyServerHelper.gson - .toJson(keyAndValue)}" + logger.warn( + s"cannot found user configuration key:${EntranceConfiguration.WDS_LINKIS_INSTANCE.key}," + s"will use default value " ) } val maxRunningJobs = EntranceConfiguration.WDS_LINKIS_INSTANCE.getValue(keyAndValue, true) @@ -102,27 +92,7 @@ class EntranceUserParallelOrchestratorPlugin extends UserParallelOrchestratorPlu if (null == userCreatorLabel || null == engineTypeLabel) { return getDefaultMaxRuningNum } - configCache.get(getKey(userCreatorLabel, engineTypeLabel)) - } - - private def getKey( - userCreatorLabel: UserCreatorLabel, - engineTypeLabel: EngineTypeLabel - ): String = { - userCreatorLabel.getStringValue + SPLIT + engineTypeLabel.getStringValue - } - - private def fromKeyGetLabels(key: String): (UserCreatorLabel, EngineTypeLabel) = { - if (StringUtils.isBlank(key)) (null, null) - else { - val labelStringValues = key.split(SPLIT) - if (labelStringValues.length < 2) return (null, null) - val userCreatorLabel = labelFactory - .createLabel[UserCreatorLabel](LabelKeyConstant.USER_CREATOR_TYPE_KEY, labelStringValues(0)) - val engineTypeLabel = labelFactory - .createLabel[EngineTypeLabel](LabelKeyConstant.ENGINE_TYPE_KEY, labelStringValues(1)) - (userCreatorLabel, engineTypeLabel) - } + configCache.get(EntranceUtils.getUserCreatorEcTypeKey(userCreatorLabel, engineTypeLabel)) } override def isReady: Boolean = true diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/persistence/EntranceResultSetEngine.scala 
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/persistence/EntranceResultSetEngine.scala index 111350806e..2ba98438e8 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/persistence/EntranceResultSetEngine.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/persistence/EntranceResultSetEngine.scala @@ -17,20 +17,12 @@ package org.apache.linkis.entrance.persistence -import org.apache.linkis.common.io.{FsPath, MetaData, Record} -import org.apache.linkis.common.io.resultset.ResultSet -import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.common.utils.Logging import org.apache.linkis.entrance.exception.{EntranceErrorCode, EntranceErrorException} -import org.apache.linkis.entrance.execute.StorePathExecuteRequest -import org.apache.linkis.entrance.job.{EntranceExecuteRequest, EntranceExecutionJob} -import org.apache.linkis.entrance.scheduler.cache.CacheOutputExecuteResponse -import org.apache.linkis.governance.common.entity.job.SubJobDetail import org.apache.linkis.scheduler.executer.{AliasOutputExecuteResponse, OutputExecuteResponse} import org.apache.linkis.scheduler.queue.Job -import org.apache.linkis.storage.resultset.{ResultSetFactory, ResultSetWriter} -import org.apache.linkis.storage.utils.FileSystemUtils +import org.apache.linkis.storage.resultset.ResultSetFactory -import org.apache.commons.io.IOUtils import org.apache.commons.lang3.StringUtils class EntranceResultSetEngine extends ResultSetEngine with Logging { @@ -46,15 +38,11 @@ class EntranceResultSetEngine extends ResultSetEngine with Logging { EntranceErrorCode.RESULT_NOT_PERSISTED_ERROR.getDesc ) } - case CacheOutputExecuteResponse(alias, output) => - if (ResultSetFactory.getInstance.isResultSetPath(output)) { - getDir(output) - } else { - throw new EntranceErrorException( - EntranceErrorCode.RESULT_NOT_PERSISTED_ERROR.getErrCode, - 
EntranceErrorCode.RESULT_NOT_PERSISTED_ERROR.getDesc - ) - } + case _ => + throw new EntranceErrorException( + EntranceErrorCode.RESULT_NOT_PERSISTED_ERROR.getErrCode, + EntranceErrorCode.RESULT_NOT_PERSISTED_ERROR.getDesc + ) } } @@ -64,7 +52,7 @@ class EntranceResultSetEngine extends ResultSetEngine with Logging { } else { val arr = str.split("/").filter(StringUtils.isNotBlank) if (arr.length <= 2) { - return str + str } else { str.substring(0, str.lastIndexOf("/")) } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceGroupFactory.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceGroupFactory.scala index 7f16dd2463..a436178880 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceGroupFactory.scala +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/EntranceGroupFactory.scala @@ -17,30 +17,20 @@ package org.apache.linkis.entrance.scheduler -import org.apache.linkis.common.ServiceInstance import org.apache.linkis.common.conf.{CommonVars, Configuration} import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.entrance.conf.EntranceConfiguration import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ import org.apache.linkis.entrance.exception.{EntranceErrorCode, EntranceErrorException} import org.apache.linkis.entrance.execute.EntranceJob +import org.apache.linkis.entrance.utils.EntranceUtils import org.apache.linkis.governance.common.protocol.conf.{ RequestQueryEngineConfigWithGlobalConfig, ResponseQueryConfig } -import org.apache.linkis.instance.label.client.InstanceLabelClient -import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext -import org.apache.linkis.manager.label.constant.{LabelKeyConstant, LabelValueConstant} import 
org.apache.linkis.manager.label.entity.Label -import org.apache.linkis.manager.label.entity.engine.{ - ConcurrentEngineConnLabel, - EngineTypeLabel, - UserCreatorLabel -} -import org.apache.linkis.manager.label.entity.route.RouteLabel +import org.apache.linkis.manager.label.entity.engine.{EngineTypeLabel, UserCreatorLabel} import org.apache.linkis.manager.label.utils.LabelUtil -import org.apache.linkis.protocol.constants.TaskConstant -import org.apache.linkis.protocol.utils.TaskUtils import org.apache.linkis.rpc.Sender import org.apache.linkis.scheduler.queue.{Group, GroupFactory, SchedulerEvent} import org.apache.linkis.scheduler.queue.parallelqueue.ParallelGroup @@ -51,8 +41,6 @@ import java.util import java.util.concurrent.TimeUnit import java.util.regex.Pattern -import scala.collection.JavaConverters._ - import com.google.common.cache.{Cache, CacheBuilder} class EntranceGroupFactory extends GroupFactory with Logging { @@ -81,29 +69,16 @@ class EntranceGroupFactory extends GroupFactory with Logging { } override def getOrCreateGroup(event: SchedulerEvent): Group = { - val (labels, params) = event match { + val labels = event match { case job: EntranceJob => - (job.getJobRequest.getLabels, job.getJobRequest.getParams) + job.getJobRequest.getLabels + case _ => + throw new EntranceErrorException(LABEL_NOT_NULL.getErrorCode, LABEL_NOT_NULL.getErrorDesc) } - val groupName = EntranceGroupFactory.getGroupNameByLabels(labels, params) + val groupName = EntranceGroupFactory.getGroupNameByLabels(labels) val cacheGroup = groupNameToGroups.getIfPresent(groupName) if (null == cacheGroup) synchronized { val maxAskExecutorTimes = EntranceConfiguration.MAX_ASK_EXECUTOR_TIME.getValue.toLong - if (groupName.startsWith(EntranceGroupFactory.CONCURRENT)) { - if (null == groupNameToGroups.getIfPresent(groupName)) synchronized { - if (null == groupNameToGroups.getIfPresent(groupName)) { - val group = new ParallelGroup( - groupName, - 100, - 
EntranceConfiguration.CONCURRENT_FACTORY_MAX_CAPACITY.getValue - ) - group.setMaxRunningJobs(EntranceConfiguration.CONCURRENT_MAX_RUNNING_JOBS.getValue) - group.setMaxAskExecutorTimes(EntranceConfiguration.CONCURRENT_EXECUTOR_TIME.getValue) - groupNameToGroups.put(groupName, group) - return group - } - } - } val sender: Sender = Sender.getSender(Configuration.CLOUD_CONSOLE_CONFIGURATION_SPRING_APPLICATION_NAME.getValue) val userCreatorLabel: UserCreatorLabel = LabelUtil.getUserCreatorLabel(labels) @@ -141,8 +116,11 @@ class EntranceGroupFactory extends GroupFactory with Logging { group.setMaxRunningJobs(maxRunningJobs) group.setMaxAskExecutorTimes(maxAskExecutorTimes) groupNameToGroups.put(groupName, group) + group + } + else { + cacheGroup } - groupNameToGroups.getIfPresent(groupName) } override def getGroup(groupName: String): Group = { @@ -156,105 +134,40 @@ class EntranceGroupFactory extends GroupFactory with Logging { group } + /** + * User task concurrency control is controlled for multiple Entrances, which will be evenly + * distributed based on the number of existing Entrances + * @param keyAndValue + * @return + */ private def getUserMaxRunningJobs(keyAndValue: util.Map[String, String]): Int = { - var userDefinedRunningJobs = EntranceConfiguration.WDS_LINKIS_INSTANCE.getValue(keyAndValue) - var entranceNum = Sender.getInstances(Sender.getThisServiceInstance.getApplicationName).length - val labelList = new util.ArrayList[Label[_]]() - val offlineRouteLabel = LabelBuilderFactoryContext.getLabelBuilderFactory - .createLabel[RouteLabel](LabelKeyConstant.ROUTE_KEY, LabelValueConstant.OFFLINE_VALUE) - labelList.add(offlineRouteLabel) - var offlineIns: Array[ServiceInstance] = null - Utils.tryAndWarn { - offlineIns = InstanceLabelClient.getInstance - .getInstanceFromLabel(labelList) - .asScala - .filter(l => - null != l && l.getApplicationName - .equalsIgnoreCase(Sender.getThisServiceInstance.getApplicationName) - ) - .toArray - } - if (null != offlineIns) { - 
logger.info(s"There are ${offlineIns.length} offlining instance.") - entranceNum = entranceNum - offlineIns.length - } - /* - Sender.getInstances may get 0 instances due to cache in Sender. So this instance is the one instance. - */ - if (0 >= entranceNum) { - logger.error( - s"Got ${entranceNum} ${Sender.getThisServiceInstance.getApplicationName} instances." - ) - entranceNum = 1 - } + val userDefinedRunningJobs = EntranceConfiguration.WDS_LINKIS_INSTANCE.getValue(keyAndValue) + val entranceNum = EntranceUtils.getRunningEntranceNumber() Math.max( EntranceConfiguration.ENTRANCE_INSTANCE_MIN.getValue, userDefinedRunningJobs / entranceNum - ); + ) } } object EntranceGroupFactory { - val CACHE = "_Cache" - - val CONCURRENT = "Concurrent_" - - def getGroupName( - creator: String, - user: String, - params: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef] - ): String = { - val runtime = TaskUtils.getRuntimeMap(params) - val cache = - if ( - runtime.get(TaskConstant.READ_FROM_CACHE) != null && runtime - .get(TaskConstant.READ_FROM_CACHE) - .asInstanceOf[Boolean] - ) { - CACHE - } else "" - if (StringUtils.isNotEmpty(creator)) creator + "_" + user + cache - else EntranceConfiguration.DEFAULT_REQUEST_APPLICATION_NAME.getValue + "_" + user + cache - } - - def getGroupNameByLabels( - labels: java.util.List[Label[_]], - params: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef] - ): String = { - - val userCreator = labels.asScala.find(_.isInstanceOf[UserCreatorLabel]) - val engineType = labels.asScala.find(_.isInstanceOf[EngineTypeLabel]) - val concurrent = labels.asScala.find(_.isInstanceOf[ConcurrentEngineConnLabel]) - if (userCreator.isEmpty || engineType.isEmpty) { + /** + * Entrance group rule creator_username_engineType eg:IDE_PEACEWONG_SPARK + * @param labels + * @param params + * @return + */ + def getGroupNameByLabels(labels: java.util.List[Label[_]]): String = { + val userCreatorLabel = LabelUtil.getUserCreatorLabel(labels) + val engineTypeLabel = 
LabelUtil.getEngineTypeLabel(labels) + if (null == userCreatorLabel || null == engineTypeLabel) { throw new EntranceErrorException(LABEL_NOT_NULL.getErrorCode, LABEL_NOT_NULL.getErrorDesc) } - - if (concurrent.isDefined) { - - val engineTypeLabel = engineType.get.asInstanceOf[EngineTypeLabel] - val groupName = CONCURRENT + engineTypeLabel.getEngineType - groupName - - } else { - val userCreatorLabel = userCreator.get.asInstanceOf[UserCreatorLabel] - - val engineTypeLabel = engineType.get.asInstanceOf[EngineTypeLabel] - - val runtime = TaskUtils.getRuntimeMap(params) - val cache = - if ( - runtime.get(TaskConstant.READ_FROM_CACHE) != null && runtime - .get(TaskConstant.READ_FROM_CACHE) - .asInstanceOf[Boolean] - ) { - CACHE - } else "" - val groupName = - userCreatorLabel.getCreator + "_" + userCreatorLabel.getUser + "_" + engineTypeLabel.getEngineType + cache - groupName - } + val groupName = + userCreatorLabel.getCreator + "_" + userCreatorLabel.getUser + "_" + engineTypeLabel.getEngineType + groupName } } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/CacheOutputExecuteResponse.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/CacheOutputExecuteResponse.scala deleted file mode 100644 index 47a6ce9e9e..0000000000 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/CacheOutputExecuteResponse.scala +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.entrance.scheduler.cache - -import org.apache.linkis.scheduler.executer.OutputExecuteResponse - -case class CacheOutputExecuteResponse(alias: String, output: String) extends OutputExecuteResponse { - override def getOutput: String = output -} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumer.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumer.scala deleted file mode 100644 index 65bbbd39b4..0000000000 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumer.scala +++ /dev/null @@ -1,158 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.entrance.scheduler.cache - -import org.apache.linkis.common.io.FsPath -import org.apache.linkis.common.utils.Utils -import org.apache.linkis.entrance.errorcode.EntranceErrorCodeSummary._ -import org.apache.linkis.entrance.exception.CacheNotReadyException -import org.apache.linkis.entrance.execute.EntranceJob -import org.apache.linkis.entrance.persistence.PersistenceManager -import org.apache.linkis.entrance.utils.JobHistoryHelper -import org.apache.linkis.governance.common.entity.job.JobRequest -import org.apache.linkis.manager.label.constant.LabelKeyConstant -import org.apache.linkis.protocol.constants.TaskConstant -import org.apache.linkis.protocol.utils.TaskUtils -import org.apache.linkis.scheduler.SchedulerContext -import org.apache.linkis.scheduler.errorcode.LinkisSchedulerErrorCodeSummary._ -import org.apache.linkis.scheduler.exception.SchedulerErrorException -import org.apache.linkis.scheduler.executer.SuccessExecuteResponse -import org.apache.linkis.scheduler.queue.Group -import org.apache.linkis.scheduler.queue.fifoqueue.FIFOUserConsumer -import org.apache.linkis.server.BDPJettyServerHelper -import org.apache.linkis.storage.FSFactory -import org.apache.linkis.storage.fs.FileSystem - -import org.apache.commons.io.FilenameUtils -import org.apache.commons.lang3.StringUtils - -import java.util.concurrent.ExecutorService - -import scala.collection.JavaConverters._ - -import com.google.common.collect.Lists - -class ReadCacheConsumer( - schedulerContext: SchedulerContext, - executeService: ExecutorService, - private var group: Group, - persistenceManager: PersistenceManager -) extends FIFOUserConsumer(schedulerContext, executeService, group) { - - override protected def loop(): Unit = { - val event = Option(getConsumeQueue.take()) - event.foreach { - case job: EntranceJob => - job.getJobRequest match { - case jobRequest: JobRequest => - Utils.tryCatch { - val engineTpyeLabel = jobRequest.getLabels.asScala - .filter(l => 
l.getLabelKey.equalsIgnoreCase(LabelKeyConstant.ENGINE_TYPE_KEY)) - .headOption - .getOrElse(null) - val labelStrList = jobRequest.getLabels.asScala.map { case l => - l.getStringValue - }.toList - if (null == engineTpyeLabel) { - logger.error( - "Invalid engineType null, cannot process. jobReq : " + BDPJettyServerHelper.gson - .toJson(jobRequest) - ) - throw CacheNotReadyException( - INVALID_ENGINETYPE_NULL.getErrorCode, - INVALID_ENGINETYPE_NULL.getErrorDesc - ) - } - val readCacheBefore: Long = TaskUtils - .getRuntimeMap(job.getParams) - .getOrDefault(TaskConstant.READ_CACHE_BEFORE, 300L: java.lang.Long) - .asInstanceOf[Long] - val cacheResult = JobHistoryHelper.getCache( - jobRequest.getExecutionCode, - jobRequest.getExecuteUser, - labelStrList.asJava, - readCacheBefore - ) - if (cacheResult != null && StringUtils.isNotBlank(cacheResult.getResultLocation)) { - val resultSets = listResults(cacheResult.getResultLocation, job.getUser) - if (resultSets.size() > 0) { - for (resultSet: FsPath <- resultSets.asScala) { - val alias = FilenameUtils.getBaseName(resultSet.getPath) - val output = FsPath - .getFsPath( - cacheResult.getResultLocation, - FilenameUtils.getName(resultSet.getPath) - ) - .getSchemaPath -// persistenceManager.onResultSetCreated(job, new CacheOutputExecuteResponse(alias, output)) - throw CacheNotReadyException( - INVALID_RESULTSETS.getErrorCode, - INVALID_RESULTSETS.getErrorDesc - ) - // todo check - } -// persistenceManager.onResultSizeCreated(job, resultSets.size()) - } - val runtime = TaskUtils.getRuntimeMap(job.getParams) - runtime.put(TaskConstant.CACHE, java.lang.Boolean.FALSE) - TaskUtils.addRuntimeMap(job.getParams, runtime) - job.transitionCompleted(SuccessExecuteResponse(), "Result found in cache") - } else { - logger.info("Cache not found, submit to normal consumer.") - submitToExecute(job) - } - } { t => - logger.warn("Read cache failed, submit to normal consumer: ", t) - submitToExecute(job) - } - case _ => - } - case _ => - } - } - - 
private def listResults(resultLocation: String, user: String) = { - val dirPath = FsPath.getFsPath(resultLocation) - val fileSystem = FSFactory.getFsByProxyUser(dirPath, user).asInstanceOf[FileSystem] - Utils.tryFinally { - fileSystem.init(null) - if (fileSystem.exists(dirPath)) { - fileSystem.listPathWithError(dirPath).getFsPaths - } else { - Lists.newArrayList[FsPath]() - } - }(Utils.tryQuietly(fileSystem.close())) - } - - private def submitToExecute(job: EntranceJob): Unit = { - val runtime = TaskUtils.getRuntimeMap(job.getParams) - runtime.put(TaskConstant.READ_FROM_CACHE, java.lang.Boolean.FALSE) - TaskUtils.addRuntimeMap(job.getParams, runtime) - val groupName = schedulerContext.getOrCreateGroupFactory.getOrCreateGroup(job).getGroupName - val consumer = schedulerContext.getOrCreateConsumerManager.getOrCreateConsumer(groupName) - val index = consumer.getConsumeQueue.offer(job) - // index.map(getEventId(_, groupName)).foreach(job.setId) - if (index.isEmpty) { - throw new SchedulerErrorException( - JOB_QUEUE_IS_FULL.getErrorCode, - JOB_QUEUE_IS_FULL.getErrorDesc - ) - } - } - -} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumerManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumerManager.scala deleted file mode 100644 index a4cba19f34..0000000000 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/scheduler/cache/ReadCacheConsumerManager.scala +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.entrance.scheduler.cache - -import org.apache.linkis.entrance.persistence.PersistenceManager -import org.apache.linkis.entrance.scheduler.EntranceGroupFactory -import org.apache.linkis.scheduler.queue.fifoqueue.FIFOUserConsumer -import org.apache.linkis.scheduler.queue.parallelqueue.ParallelConsumerManager - -class ReadCacheConsumerManager(maxParallelismUsers: Int, persistenceManager: PersistenceManager) - extends ParallelConsumerManager(maxParallelismUsers) { - - override protected def createConsumer(groupName: String): FIFOUserConsumer = { - val group = getSchedulerContext.getOrCreateGroupFactory.getGroup(groupName) - if (groupName.endsWith(EntranceGroupFactory.CACHE)) { - logger.info("Create cache consumer with group: " + groupName) - new ReadCacheConsumer( - getSchedulerContext, - getOrCreateExecutorService, - group, - persistenceManager - ) - } else { - logger.info("Create normal consumer with group: " + groupName) - new FIFOUserConsumer(getSchedulerContext, getOrCreateExecutorService, group) - } - } - -} diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/timeout/JobTimeoutManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/timeout/JobTimeoutManager.scala index aaaf131bd8..4e62430316 100644 --- a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/timeout/JobTimeoutManager.scala +++ 
b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/timeout/JobTimeoutManager.scala @@ -38,8 +38,8 @@ class JobTimeoutManager extends Logging { private[this] final val timeoutJobByName: ConcurrentMap[String, EntranceJob] = new ConcurrentHashMap[String, EntranceJob] - val timeoutCheck: Boolean = EntranceConfiguration.ENABLE_JOB_TIMEOUT_CHECK.getValue - val timeoutScanInterval: Int = EntranceConfiguration.TIMEOUT_SCAN_INTERVAL.getValue + private val timeoutCheck: Boolean = EntranceConfiguration.ENABLE_JOB_TIMEOUT_CHECK.getValue + private val timeoutScanInterval: Int = EntranceConfiguration.TIMEOUT_SCAN_INTERVAL.getValue def add(jobKey: String, job: EntranceJob): Unit = { logger.info(s"Adding timeout job: ${job.getId()}") @@ -77,75 +77,75 @@ class JobTimeoutManager extends Logging { } private def timeoutDetective(): Unit = { - if (timeoutCheck) { - def checkAndSwitch(job: EntranceJob): Unit = { - logger.info(s"Checking whether the job id ${job.getJobRequest.getId()} timed out. ") - val currentTimeSeconds = System.currentTimeMillis() / 1000 - // job.isWaiting == job in queue - val jobScheduleStartTimeSeconds = - if (job.isWaiting) job.createTime / 1000 else currentTimeSeconds - val queuingTimeSeconds = currentTimeSeconds - jobScheduleStartTimeSeconds - val jobRunningStartTimeSeconds = - if (job.getStartTime > 0) job.getStartTime / 1000 else currentTimeSeconds - val runningTimeSeconds = currentTimeSeconds - jobRunningStartTimeSeconds - if (!job.isCompleted) { - job.jobRequest.getLabels.asScala foreach { - case queueTimeOutLabel: JobQueuingTimeoutLabel => - if ( - job.isWaiting && queueTimeOutLabel.getQueuingTimeout > 0 && queuingTimeSeconds >= queueTimeOutLabel.getQueuingTimeout - ) { - logger.warn( - s"Job ${job.getJobRequest.getId()} queued time : ${queuingTimeSeconds} seconds, which was over queueTimeOut : ${queueTimeOutLabel.getQueuingTimeout} seconds, cancel it now! 
" - ) - job.onFailure( - s"Job queued ${queuingTimeSeconds} seconds over max queue time : ${queueTimeOutLabel.getQueuingTimeout} seconds.", - null - ) - } - case jobRunningTimeoutLabel: JobRunningTimeoutLabel => - if ( - job.isRunning && jobRunningTimeoutLabel.getRunningTimeout > 0 && runningTimeSeconds >= jobRunningTimeoutLabel.getRunningTimeout - ) { - logger.warn( - s"Job ${job.getJobRequest.getId()} run timeout ${runningTimeSeconds} seconds, which was over runTimeOut : ${jobRunningTimeoutLabel.getRunningTimeout} seconds, cancel it now! " - ) - job.onFailure( - s"Job run ${runningTimeSeconds} seconds over max run time : ${jobRunningTimeoutLabel.getRunningTimeout} seconds.", - null - ) - } - case _ => - } + def checkAndSwitch(job: EntranceJob): Unit = { + logger.info(s"Checking whether the job id ${job.getJobRequest.getId()} timed out. ") + val currentTimeSeconds = System.currentTimeMillis() / 1000 + // job.isWaiting == job in queue + val jobScheduleStartTimeSeconds = + if (job.isWaiting) job.createTime / 1000 else currentTimeSeconds + val queuingTimeSeconds = currentTimeSeconds - jobScheduleStartTimeSeconds + val jobRunningStartTimeSeconds = + if (job.getStartTime > 0) job.getStartTime / 1000 else currentTimeSeconds + val runningTimeSeconds = currentTimeSeconds - jobRunningStartTimeSeconds + if (!job.isCompleted) { + job.jobRequest.getLabels.asScala foreach { + case queueTimeOutLabel: JobQueuingTimeoutLabel => + if ( + job.isWaiting && queueTimeOutLabel.getQueuingTimeout > 0 && queuingTimeSeconds >= queueTimeOutLabel.getQueuingTimeout + ) { + logger.warn( + s"Job ${job.getJobRequest.getId()} queued time : ${queuingTimeSeconds} seconds, which was over queueTimeOut : ${queueTimeOutLabel.getQueuingTimeout} seconds, cancel it now! 
" + ) + job.onFailure( + s"Job queued ${queuingTimeSeconds} seconds over max queue time : ${queueTimeOutLabel.getQueuingTimeout} seconds.", + null + ) + } + case jobRunningTimeoutLabel: JobRunningTimeoutLabel => + if ( + job.isRunning && jobRunningTimeoutLabel.getRunningTimeout > 0 && runningTimeSeconds >= jobRunningTimeoutLabel.getRunningTimeout + ) { + logger.warn( + s"Job ${job.getJobRequest.getId()} run timeout ${runningTimeSeconds} seconds, which was over runTimeOut : ${jobRunningTimeoutLabel.getRunningTimeout} seconds, cancel it now! " + ) + job.onFailure( + s"Job run ${runningTimeSeconds} seconds over max run time : ${jobRunningTimeoutLabel.getRunningTimeout} seconds.", + null + ) + } + case _ => } } - - timeoutJobByName.asScala.foreach(item => { - logger.info(s"Running timeout detection!") - synchronized { - jobCompleteDelete(item._1) - if (jobExist(item._1)) checkAndSwitch(item._2) - } - }) } + + timeoutJobByName.asScala.foreach(item => { + logger.info(s"Running timeout detection!") + synchronized { + jobCompleteDelete(item._1) + if (jobExist(item._1)) checkAndSwitch(item._2) + } + }) } // Thread periodic scan timeout task - val woker = Utils.defaultScheduler.scheduleAtFixedRate( - new Runnable() { - - override def run(): Unit = { - Utils.tryCatch { - timeoutDetective() - } { case t: Throwable => - logger.error(s"TimeoutDetective task failed. ${t.getMessage}", t) + if (timeoutCheck) { + val woker = Utils.defaultScheduler.scheduleAtFixedRate( + new Runnable() { + + override def run(): Unit = { + Utils.tryCatch { + timeoutDetective() + } { case t: Throwable => + logger.warn(s"TimeoutDetective task failed. 
${t.getMessage}", t) + } } - } - }, - 0, - timeoutScanInterval, - TimeUnit.SECONDS - ) + }, + 0, + timeoutScanInterval, + TimeUnit.SECONDS + ) + } } diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/EntranceUtils.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/EntranceUtils.scala new file mode 100644 index 0000000000..13dcefa9f9 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/main/scala/org/apache/linkis/entrance/utils/EntranceUtils.scala @@ -0,0 +1,108 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.entrance.utils + +import org.apache.linkis.common.ServiceInstance +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.instance.label.client.InstanceLabelClient +import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext +import org.apache.linkis.manager.label.constant.{LabelKeyConstant, LabelValueConstant} +import org.apache.linkis.manager.label.entity.Label +import org.apache.linkis.manager.label.entity.engine.{EngineTypeLabel, UserCreatorLabel} +import org.apache.linkis.manager.label.entity.route.RouteLabel +import org.apache.linkis.manager.label.utils.EngineTypeLabelCreator +import org.apache.linkis.rpc.Sender + +import org.apache.commons.lang3.StringUtils + +import java.util + +import scala.collection.JavaConverters.asScalaBufferConverter + +object EntranceUtils extends Logging { + + private val SPLIT = "," + + private val labelFactory = LabelBuilderFactoryContext.getLabelBuilderFactory + + def getUserCreatorEcTypeKey( + userCreatorLabel: UserCreatorLabel, + engineTypeLabel: EngineTypeLabel + ): String = { + userCreatorLabel.getStringValue + SPLIT + engineTypeLabel.getStringValue + } + + def fromKeyGetLabels(key: String): (UserCreatorLabel, EngineTypeLabel) = { + if (StringUtils.isBlank(key)) (null, null) + else { + val labelStringValues = key.split(SPLIT) + if (labelStringValues.length < 2) return (null, null) + val userCreatorLabel = labelFactory + .createLabel[UserCreatorLabel](LabelKeyConstant.USER_CREATOR_TYPE_KEY, labelStringValues(0)) + val engineTypeLabel = labelFactory + .createLabel[EngineTypeLabel](LabelKeyConstant.ENGINE_TYPE_KEY, labelStringValues(1)) + (userCreatorLabel, engineTypeLabel) + } + } + + def getDefaultCreatorECTypeKey(creator: String, ecType: String): String = { + val userCreatorLabel = + labelFactory.createLabel[UserCreatorLabel](LabelKeyConstant.USER_CREATOR_TYPE_KEY) + val ecTypeLabel = EngineTypeLabelCreator.createEngineTypeLabel(ecType) + 
userCreatorLabel.setUser("*") + userCreatorLabel.setCreator(creator) + getUserCreatorEcTypeKey(userCreatorLabel, ecTypeLabel) + } + + def getRunningEntranceNumber(): Int = { + val entranceNum = Sender.getInstances(Sender.getThisServiceInstance.getApplicationName).length + val labelList = new util.ArrayList[Label[_]]() + val offlineRouteLabel = LabelBuilderFactoryContext.getLabelBuilderFactory + .createLabel[RouteLabel](LabelKeyConstant.ROUTE_KEY, LabelValueConstant.OFFLINE_VALUE) + labelList.add(offlineRouteLabel) + var offlineIns: Array[ServiceInstance] = null + Utils.tryAndWarn { + offlineIns = InstanceLabelClient.getInstance + .getInstanceFromLabel(labelList) + .asScala + .filter(l => + null != l && l.getApplicationName + .equalsIgnoreCase(Sender.getThisServiceInstance.getApplicationName) + ) + .toArray + } + val entranceRealNumber = if (null != offlineIns) { + logger.info(s"There are ${offlineIns.length} offlining instance.") + entranceNum - offlineIns.length + } else { + entranceNum + } + /* + Sender.getInstances may get 0 instances due to cache in Sender. So this instance is the one instance. + */ + if (entranceRealNumber <= 0) { + logger.error( + s"Got ${entranceRealNumber} ${Sender.getThisServiceInstance.getApplicationName} instances." + ) + 1 + } else { + entranceRealNumber + } + } + +} diff --git a/linkis-computation-governance/linkis-entrance/src/test/scala/org/apache/linkis/entrance/interceptor/impl/TestCommentHelper.java b/linkis-computation-governance/linkis-entrance/src/test/scala/org/apache/linkis/entrance/interceptor/impl/TestCommentHelper.java new file mode 100644 index 0000000000..34482d293c --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/test/scala/org/apache/linkis/entrance/interceptor/impl/TestCommentHelper.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.entrance.interceptor.impl; + +import static org.junit.jupiter.api.Assertions.*; + +import java.util.Arrays; +import org.apache.linkis.common.utils.CodeAndRunTypeUtils; +import org.junit.jupiter.api.Test; +import org.junit.platform.commons.util.StringUtils; +import scala.reflect.internal.TypeDebugging; + +public class TestCommentHelper { + String sqlCode="" + + "--注解\n" + + "select * from table;\n" + + " --注解 \n" + + "--注解\n" + + " select \"--注解\" as test\n" + + " --@set yy=123\n" + + " --注解"; + + String scalaCode="" + + "// 注解\n" + + "print(1+1)\n" + + "//@set yy=123\n" + + " print(2)\n" + + " // 注解 \n" + + "// test\n" + + "print(\"//注解测试\")"; + + String scalaCodeRes="print(1+1)\n" + + "print(2)\n" + + "print(\"//注解测试\")"; + + @Test + void sqlDealCommentTest() { + String code=SQLCommentHelper.dealComment(sqlCode); + //System.out.println(code); + } + + @Test + void scalaDealCommentTest() { + String code=ScalaCommentHelper.dealComment(scalaCode); + String[] lines = Arrays.stream(code.split("\n")) + .map(String::trim) + .filter(x -> StringUtils.isNotBlank(x)) + .toArray(String[]::new); + String result=String.join("\n",lines); + // assertEquals(result,scalaCodeRes); + } + + +} \ No newline at end of file diff --git 
a/linkis-computation-governance/linkis-entrance/src/test/scala/org/apache/linkis/entrance/interceptor/impl/TestHDFSCacheLogWriter.java b/linkis-computation-governance/linkis-entrance/src/test/scala/org/apache/linkis/entrance/interceptor/impl/TestHDFSCacheLogWriter.java new file mode 100644 index 0000000000..34826ff222 --- /dev/null +++ b/linkis-computation-governance/linkis-entrance/src/test/scala/org/apache/linkis/entrance/interceptor/impl/TestHDFSCacheLogWriter.java @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.linkis.entrance.interceptor.impl; + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.UUID; +import org.apache.commons.io.FileUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.linkis.cs.common.entity.source.ContextID; +import org.apache.linkis.entrance.log.Cache; +import org.apache.linkis.entrance.log.HDFSCacheLogWriter; +import org.apache.linkis.rpc.BaseRPCSender; +import org.apache.linkis.rpc.Sender; +import org.apache.linkis.storage.fs.FileSystem; +import org.apache.linkis.storage.fs.impl.LocalFileSystem; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.Mockito; +import oshi.util.FileUtil; + +class TestHDFSCacheLogWriter { + + + @Test + void write() throws IOException { + + Cache cache = new Cache(5); + String fileName= UUID.randomUUID().toString().replace("-", "")+"-test.log"; + String logPath = System.getProperty("java.io.tmpdir")+ File.separator+fileName; + System.out.println(logPath); + String chartSet="utf-8"; + String username=System.getProperty("user.name"); + + File file=new File(logPath); + file.createNewFile(); + + HDFSCacheLogWriter logWriter =new HDFSCacheLogWriter( + //"D:\\DataSphere\\linkis\\docs\\test.log", + logPath, + chartSet, + cache, + username + ); + + String[] msgArr =new String[]{"1","2","3","4","5","6","7","8","9","10","11","12","13","14","15","16","17","18"}; + + List msgList = new ArrayList(Arrays.asList(msgArr)); + String msg=String.join("\n", msgList); + + logWriter.write(msg); + logWriter.flush(); + + List list = FileUtil.readFile(logPath); + String res=String.join("\n", list); + + res=res.replace("\n\n","\n"); + res=StringUtils.strip(res, " \n"); + Assertions.assertEquals(res,msg); + + + } +} \ No newline at end of file diff --git 
a/linkis-computation-governance/linkis-jdbc-driver/pom.xml b/linkis-computation-governance/linkis-jdbc-driver/pom.xml index cdf23c5f21..28437d95a5 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/pom.xml +++ b/linkis-computation-governance/linkis-jdbc-driver/pom.xml @@ -41,6 +41,15 @@ + + + org.apache.maven.plugins + maven-surefire-plugin + + true + + + net.alchim31.maven scala-maven-plugin diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/UJESSQLErrorCode.java b/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/LinkisSQLErrorCode.java similarity index 90% rename from linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/UJESSQLErrorCode.java rename to linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/LinkisSQLErrorCode.java index fc283d8fbe..442cbbfb43 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/UJESSQLErrorCode.java +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/LinkisSQLErrorCode.java @@ -17,7 +17,7 @@ package org.apache.linkis.ujes.jdbc; -public enum UJESSQLErrorCode { +public enum LinkisSQLErrorCode { BAD_URL(80000, "bad url"), NOSUPPORT_DRIVER(80001, "this method not supported in driver"), NOSUPPORT_CONNECTION(80002, "this method not supported in connection"), @@ -38,11 +38,12 @@ public enum UJESSQLErrorCode { RESULTSET_NULL( 80017, "resultset is null,try to run next() firstly to init ResultSet and MetaData"), PREPARESTATEMENT_TYPEERROR(80018, "parameter type error"), - METADATA_EMPTY(80019, "data is empty"); + METADATA_EMPTY(80019, "data is empty"), + UNKNOWN_ERROR(80020, "unknown error"); private String msg; private int code; - UJESSQLErrorCode(int code, String msg) { + LinkisSQLErrorCode(int code, String msg) { this.code = code; this.msg = msg; } @@ -54,4 +55,8 @@ 
public String getMsg() { public int getCode() { return code; } + + public void setMsg(String msg) { + this.msg = msg; + } } diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/UJESSQLDriver.java b/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/UJESSQLDriver.java index 90e1f73563..c12e2791b3 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/UJESSQLDriver.java +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/UJESSQLDriver.java @@ -48,9 +48,7 @@ public class UJESSQLDriver extends UJESSQLDriverMain implements Driver { static String TOKEN_VALUE = "value"; static String PASSWORD = "password"; static boolean TABLEAU_SERVER = false; - static String LIMIT_ENABLED = "true"; - static String LIMIT = "limit"; - + static String FIXED_SESSION = "fixedSession"; static String VERSION = "version"; static int DEFAULT_VERSION = 1; static String MAX_CONNECTION_SIZE = "maxConnectionSize"; @@ -58,7 +56,7 @@ public class UJESSQLDriver extends UJESSQLDriverMain implements Driver { static String ENABLE_DISCOVERY = "enableDiscovery"; static String ENABLE_LOADBALANCER = "enableLoadBalancer"; static String CREATOR = "creator"; - + static String TABLEAU = "tableau"; static String VARIABLE_HEADER = "var:"; static String PARAM_SPLIT = "&"; static String KV_SPLIT = "="; diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/utils/JDBCUtils.java b/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/utils/JDBCUtils.java index 061acd42f9..3e1e7e3182 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/utils/JDBCUtils.java +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/java/org/apache/linkis/ujes/jdbc/utils/JDBCUtils.java @@ -17,10 +17,16 @@ package 
org.apache.linkis.ujes.jdbc.utils; +import org.apache.linkis.common.utils.Utils; + +import java.util.concurrent.atomic.AtomicInteger; + public class JDBCUtils { private static final char SEARCH_STRING_ESCAPE = '\\'; + public static final AtomicInteger idCreator = new AtomicInteger(); + public static String convertPattern(final String pattern) { if (pattern == null) { return ".*"; @@ -41,8 +47,6 @@ public static String convertPattern(final String pattern) { continue; } else if (c == '%') { result.append(".*"); - } else if (c == '_') { - result.append('.'); } else { result.append(Character.toLowerCase(c)); } @@ -52,4 +56,8 @@ public static String convertPattern(final String pattern) { return result.toString(); } } + + public static String getUniqId() { + return Utils.getLocalHostname() + "_" + JDBCUtils.idCreator.getAndIncrement(); + } } diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLConnection.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLConnection.scala new file mode 100644 index 0000000000..a9b58e153b --- /dev/null +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLConnection.scala @@ -0,0 +1,484 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.ujes.jdbc + +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.manager.label.constant.LabelKeyConstant +import org.apache.linkis.manager.label.entity.engine.{EngineType, EngineTypeLabel, RunType} +import org.apache.linkis.manager.label.utils.EngineTypeLabelCreator +import org.apache.linkis.ujes.client.UJESClient +import org.apache.linkis.ujes.client.request.JobSubmitAction +import org.apache.linkis.ujes.client.response.JobExecuteResult +import org.apache.linkis.ujes.jdbc.UJESSQLDriverMain._ +import org.apache.linkis.ujes.jdbc.utils.JDBCUtils + +import org.apache.commons.lang3.StringUtils + +import java.{sql, util} +import java.sql.{ + Blob, + CallableStatement, + Clob, + Connection, + DatabaseMetaData, + NClob, + PreparedStatement, + ResultSet, + Savepoint, + SQLException, + SQLWarning, + SQLXML, + Statement, + Struct +} +import java.util.Properties +import java.util.concurrent.Executor + +import scala.collection.JavaConverters._ + +class LinkisSQLConnection(private[jdbc] val ujesClient: UJESClient, props: Properties) + extends Connection + with Logging { + + private[jdbc] var creator = "JDBCDriver" + + private[jdbc] var tableauFlag = false + + private[jdbc] val variableMap = { + val params = props.getProperty(PARAMS) + val map = new util.HashMap[String, AnyRef] + if (params != null) { + params.split(PARAM_SPLIT).map(_.split(KV_SPLIT)).foreach { + case Array(k, v) if k.startsWith(VARIABLE_HEADER) => + map.put(k.substring(VARIABLE_HEADER.length), v) + case Array(CREATOR, v) => + creator = 
v + case _ => + } + } + map + } + + def isTableau(): Boolean = { + val params = props.getProperty(PARAMS) + if (params != null) { + params.split(PARAM_SPLIT).map(_.split(KV_SPLIT)).foreach { + case Array(TABLEAU, v) => + tableauFlag = true + case _ => + } + } + tableauFlag + } + + private[jdbc] val dbName = + if (StringUtils.isNotBlank(props.getProperty(DB_NAME))) props.getProperty(DB_NAME) + else "default" + + private val runningSQLStatements = new util.LinkedList[Statement] + + private var closed = false + + private var inited = false + + private[jdbc] val user = props.getProperty(USER) + + private[jdbc] val serverURL = props.getProperty("URL") + + private[jdbc] val fixedSessionEnabled = + if ( + props + .containsKey(FIXED_SESSION) && "true".equalsIgnoreCase(props.getProperty(FIXED_SESSION)) + ) { + true + } else { + false + } + + private val connectionId = JDBCUtils.getUniqId() + + private val labelMap: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef] + + private val startupParams: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef] + + private val runtimeParams: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef] + + private[jdbc] def getEngineType: EngineTypeLabel = { + val engineType: EngineTypeLabel = + EngineTypeLabelCreator.createEngineTypeLabel(EngineType.TRINO.toString) + if (props.containsKey(PARAMS)) { + val params = props.getProperty(PARAMS) + if (params != null & params.length() > 0) { + params.split(PARAM_SPLIT).map(_.split(KV_SPLIT)).foreach { + case Array(k, v) if k.equals(UJESSQLDriver.ENGINE_TYPE) => + return EngineTypeLabelCreator.createEngineTypeLabel(v) + case _ => + } + } + } + engineType + } + + private[jdbc] def throwWhenClosed[T](op: => T): T = + if (isClosed) throw new LinkisSQLException(LinkisSQLErrorCode.CONNECTION_CLOSED) + else op + + private def createStatementAndAdd[T <: Statement](op: => T): T = throwWhenClosed { + + val statement = op + runningSQLStatements.add(statement) + if (!inited) { + inited = 
true + Utils.tryAndWarn(statement.execute(s"USE $dbName")) + } + statement + } + + def getProps: Properties = props + + def removeStatement(statement: LinkisSQLStatement): Unit = runningSQLStatements.remove(statement) + + override def createStatement(): Statement = createStatementAndAdd(new LinkisSQLStatement(this)) + + override def prepareStatement(sql: String): LinkisSQLPreparedStatement = { + val statement = createStatementAndAdd(new LinkisSQLPreparedStatement(this, sql)) + statement.clearQuery() + statement + } + + override def createStatement(resultSetType: Int, resultSetConcurrency: Int): Statement = { + if (resultSetConcurrency != ResultSet.CONCUR_READ_ONLY) { + throw new SQLException( + "Statement with resultset concurrency " + resultSetConcurrency + " is not supported", + "HYC00" + ) + } + if (resultSetType == ResultSet.TYPE_SCROLL_SENSITIVE) { + throw new SQLException( + "Statement with resultset type " + resultSetType + " is not supported", + "HYC00" + ) + } + createStatementAndAdd(new LinkisSQLStatement(this)) + } + + override def prepareStatement(sql: String, autoGeneratedKeys: Int): PreparedStatement = + prepareStatement(sql) + + override def prepareStatement( + sql: String, + resultSetType: Int, + resultSetConcurrency: Int + ): PreparedStatement = prepareStatement(sql) + + override def getMetaData: DatabaseMetaData = throwWhenClosed(new UJESSQLDatabaseMetaData(this)) + + override def close(): Unit = { + runningSQLStatements.asScala.foreach { statement => Utils.tryQuietly(statement.close()) } + closed = true + } + + override def isClosed: Boolean = closed + + override def setReadOnly(readOnly: Boolean): Unit = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "setReadOnly not supported" + ) + + override def isReadOnly: Boolean = false + + override def setCatalog(catalog: String): Unit = throwWhenClosed() + + override def getCatalog: String = "" + + override def setTransactionIsolation(level: Int): Unit = {} + + override def 
getTransactionIsolation: Int = Connection.TRANSACTION_NONE + + override def getWarnings: SQLWarning = null + + override def clearWarnings(): Unit = {} + + override def setAutoCommit(autoCommit: Boolean): Unit = {} + + override def getAutoCommit: Boolean = true + + override def commit(): Unit = {} + + override def prepareCall(sql: String): CallableStatement = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "prepareCall not supported" + ) + + override def rollback(): Unit = + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_CONNECTION, "rollback not supported") + + override def nativeSQL(sql: String): String = + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_CONNECTION, "nativeSQL not supported") + + override def prepareCall( + sql: String, + resultSetType: Int, + resultSetConcurrency: Int + ): CallableStatement = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "prepareCall not supported" + ) + + override def getTypeMap: util.Map[String, Class[_]] = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "getTypeMap not supported" + ) + + override def setTypeMap(map: util.Map[String, Class[_]]): Unit = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "setTypeMap not supported" + ) + + override def setHoldability(holdability: Int): Unit = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "setHoldability not supported" + ) + + override def getHoldability: Int = 0 + + override def setSavepoint(): Savepoint = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "setSavepoint not supported" + ) + + override def setSavepoint(name: String): Savepoint = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "setSavepoint not supported" + ) + + override def rollback(savepoint: Savepoint): Unit = + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_CONNECTION, "rollback not 
supported") + + override def releaseSavepoint(savepoint: Savepoint): Unit = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "releaseSavepoint not supported" + ) + + override def createStatement( + resultSetType: Int, + resultSetConcurrency: Int, + resultSetHoldability: Int + ): Statement = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "createStatement not supported" + ) + + override def prepareStatement( + sql: String, + resultSetType: Int, + resultSetConcurrency: Int, + resultSetHoldability: Int + ): PreparedStatement = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "prepareStatement not supported" + ) + + override def prepareCall( + sql: String, + resultSetType: Int, + resultSetConcurrency: Int, + resultSetHoldability: Int + ): CallableStatement = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "prepareCall not supported" + ) + + override def prepareStatement(sql: String, columnIndexes: Array[Int]): PreparedStatement = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "prepareStatement not supported" + ) + + override def prepareStatement(sql: String, columnNames: Array[String]): PreparedStatement = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "prepareStatement not supported" + ) + + override def createClob(): Clob = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "createClob not supported" + ) + + override def createBlob(): Blob = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "createBlob not supported" + ) + + override def createNClob(): NClob = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "createNClob not supported" + ) + + override def createSQLXML(): SQLXML = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "createSQLXML not supported" + ) + + override def 
isValid(timeout: Int): Boolean = true + + override def setClientInfo(name: String, value: String): Unit = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "setClientInfo not supported" + ) + + override def setClientInfo(properties: Properties): Unit = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "properties not supported" + ) + + override def getClientInfo(name: String): String = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "getClientInfo not supported" + ) + + override def getClientInfo: Properties = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "getClientInfo not supported" + ) + + override def createArrayOf(typeName: String, elements: Array[AnyRef]): sql.Array = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "createArrayOf not supported" + ) + + override def createStruct(typeName: String, attributes: Array[AnyRef]): Struct = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "createStruct not supported" + ) + + override def setSchema(schema: String): Unit = throwWhenClosed { + if (StringUtils.isBlank(schema)) { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT, "schema is empty!") + } + createStatement().execute("use " + schema) + } + + override def getSchema: String = throwWhenClosed { + val resultSet = createStatement().executeQuery("SELECT current_database()") + if (!resultSet.next()) { + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT, "Get schema failed!") + } + resultSet.getString(1) + } + + override def abort(executor: Executor): Unit = + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_CONNECTION, "abort not supported") + + override def setNetworkTimeout(executor: Executor, milliseconds: Int): Unit = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "setNetworkTimeout not supported" + ) + + override def 
getNetworkTimeout: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "getNetworkTimeout not supported" + ) + + override def unwrap[T](iface: Class[T]): T = + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_CONNECTION, "unwrap not supported") + + override def isWrapperFor(iface: Class[_]): Boolean = + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_CONNECTION, + "isWrapperFor not supported" + ) + + def addLabels(labels: util.Map[String, AnyRef]): Unit = { + labelMap.putAll(labels) + } + + def addStartUpParams(params: util.Map[String, AnyRef]): Unit = { + startupParams.putAll(params) + } + + def addRuntimeParams(params: util.Map[String, AnyRef]): Unit = { + runtimeParams.putAll(params) + } + + def engineToCodeType(engine: String): String = { + val runType = EngineType.mapStringToEngineType(engine) match { + case EngineType.SPARK => RunType.SQL + case EngineType.HIVE => RunType.HIVE + case EngineType.TRINO => RunType.TRINO_SQL + case EngineType.PRESTO => RunType.PRESTO_SQL + case EngineType.ELASTICSEARCH => RunType.ES_SQL + case EngineType.JDBC => RunType.JDBC + case EngineType.PYTHON => RunType.SHELL + case _ => RunType.SQL + } + runType.toString + } + + private[jdbc] def toSubmit(code: String): JobExecuteResult = { + val engineTypeLabel = getEngineType + labelMap.put(LabelKeyConstant.ENGINE_TYPE_KEY, engineTypeLabel.getStringValue) + labelMap.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, s"$user-$creator") + labelMap.put(LabelKeyConstant.CODE_TYPE_KEY, engineToCodeType(engineTypeLabel.getEngineType)) + if (fixedSessionEnabled) { + labelMap.put(LabelKeyConstant.FIXED_EC_KEY, connectionId) + logger.info("Fixed session is enable session id is {}", connectionId) + } + + val jobSubmitAction = JobSubmitAction.builder + .addExecuteCode(code) + .setStartupParams(startupParams) + .setUser(user) + .addExecuteUser(user) + .setLabels(labelMap) + .setRuntimeParams(runtimeParams) + .setVariableMap(variableMap) + .build + + val 
result = ujesClient.submit(jobSubmitAction) + if (result.getStatus != 0) { + throw new SQLException(result.getMessage) + } + result + } + + override def toString: String = "LinkisConnection_" + connectionId + +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/data/PreparedData.java b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLException.scala similarity index 64% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/data/PreparedData.java rename to linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLException.scala index bdde39e23e..9cc3f3814f 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/data/PreparedData.java +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLException.scala @@ -15,20 +15,23 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.application.data; +package org.apache.linkis.ujes.jdbc -import org.apache.linkis.cli.common.entity.properties.ClientProperties; +import java.sql.SQLException -import java.util.Map; +class LinkisSQLException(msg: String, code: String, vendorCode: Int) + extends SQLException(msg, code, vendorCode) { -public class PreparedData { - Map propertiesMap; + def this(errorCode: LinkisSQLErrorCode, msg: String) { + this(msg, errorCode.getCode.toString, 0) + } - public PreparedData(Map propertiesMap) { - this.propertiesMap = propertiesMap; + def this(errorCode: LinkisSQLErrorCode) { + this(errorCode.getMsg, errorCode.getCode.toString, 0) } - public Map getPropertiesMap() { - return propertiesMap; + def this(msg: String, code: String) { + this(msg, code, 0) } + } diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLPreparedStatement.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLPreparedStatement.scala similarity index 77% rename from linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLPreparedStatement.scala rename to linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLPreparedStatement.scala index 6328da99e2..61a7020946 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLPreparedStatement.scala +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLPreparedStatement.scala @@ -36,8 +36,8 @@ import java.sql.{ import java.util import java.util.Calendar -class UJESSQLPreparedStatement(ujesSQLConnection: UJESSQLConnection, sql: String) - extends UJESSQLStatement(ujesSQLConnection) +class LinkisSQLPreparedStatement(ujesSQLConnection: LinkisSQLConnection, sql: String) + extends LinkisSQLStatement(ujesSQLConnection) with 
PreparedStatement { private val parameters = new util.HashMap[Int, Any] @@ -139,7 +139,7 @@ class UJESSQLPreparedStatement(ujesSQLConnection: UJESSQLConnection, sql: String } override def setTime(parameterIndex: Int, x: Time): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setTimestamp(parameterIndex: Int, x: Timestamp): Unit = { @@ -147,15 +147,15 @@ class UJESSQLPreparedStatement(ujesSQLConnection: UJESSQLConnection, sql: String } override def setAsciiStream(parameterIndex: Int, x: InputStream, length: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setUnicodeStream(parameterIndex: Int, x: InputStream, length: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setBinaryStream(parameterIndex: Int, x: InputStream, length: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def clearParameters(): Unit = { @@ -163,7 +163,7 @@ class UJESSQLPreparedStatement(ujesSQLConnection: UJESSQLConnection, sql: String } override def setObject(parameterIndex: Int, x: scala.Any, targetSqlType: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setObject(parameterIndex: Int, x: scala.Any): Unit = { @@ -180,8 +180,8 @@ class UJESSQLPreparedStatement(ujesSQLConnection: UJESSQLConnection, sql: String case x: Char => setString(parameterIndex, x.toString) case x: Timestamp => setTimestamp(parameterIndex, x) case _ => - throw new UJESSQLException( - UJESSQLErrorCode.PREPARESTATEMENT_TYPEERROR, + throw new 
LinkisSQLException( + LinkisSQLErrorCode.PREPARESTATEMENT_TYPEERROR, s"Can''t infer the SQL type to use for an instance of ${x.getClass.getName}. Use setObject() with an explicit Types value to specify the type to use" ) } @@ -208,23 +208,23 @@ class UJESSQLPreparedStatement(ujesSQLConnection: UJESSQLConnection, sql: String } override def setCharacterStream(parameterIndex: Int, reader: Reader, length: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setRef(parameterIndex: Int, x: Ref): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setBlob(parameterIndex: Int, x: Blob): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setClob(parameterIndex: Int, x: Clob): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setArray(parameterIndex: Int, x: java.sql.Array): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def getMetaData: ResultSetMetaData = { @@ -235,15 +235,15 @@ class UJESSQLPreparedStatement(ujesSQLConnection: UJESSQLConnection, sql: String } override def setDate(parameterIndex: Int, x: Date, cal: Calendar): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setTime(parameterIndex: Int, x: Time, cal: Calendar): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setTimestamp(parameterIndex: 
Int, x: Timestamp, cal: Calendar): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setNull(parameterIndex: Int, sqlType: Int, typeName: String): Unit = { @@ -251,7 +251,7 @@ class UJESSQLPreparedStatement(ujesSQLConnection: UJESSQLConnection, sql: String } override def setURL(parameterIndex: Int, x: URL): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def getParameterMetaData: ParameterMetaData = { @@ -264,35 +264,35 @@ class UJESSQLPreparedStatement(ujesSQLConnection: UJESSQLConnection, sql: String } override def setRowId(parameterIndex: Int, x: RowId): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setNString(parameterIndex: Int, value: String): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setNCharacterStream(parameterIndex: Int, value: Reader, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setNClob(parameterIndex: Int, value: NClob): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setClob(parameterIndex: Int, reader: Reader, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setBlob(parameterIndex: Int, inputStream: InputStream, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new 
LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setNClob(parameterIndex: Int, reader: Reader, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setSQLXML(parameterIndex: Int, xmlObject: SQLXML): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setObject( @@ -301,47 +301,47 @@ class UJESSQLPreparedStatement(ujesSQLConnection: UJESSQLConnection, sql: String targetSqlType: Int, scaleOrLength: Int ): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setAsciiStream(parameterIndex: Int, x: InputStream, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setBinaryStream(parameterIndex: Int, x: InputStream, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setCharacterStream(parameterIndex: Int, reader: Reader, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setAsciiStream(parameterIndex: Int, x: InputStream): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setBinaryStream(parameterIndex: Int, x: InputStream): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setCharacterStream(parameterIndex: Int, reader: 
Reader): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setNCharacterStream(parameterIndex: Int, value: Reader): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setClob(parameterIndex: Int, reader: Reader): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setBlob(parameterIndex: Int, inputStream: InputStream): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def setNClob(parameterIndex: Int, reader: Reader): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT) } override def getResultSetType: Int = { diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLStatement.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLStatement.scala similarity index 50% rename from linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLStatement.scala rename to linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLStatement.scala index 916bdbaa93..f00d870978 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLStatement.scala +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/LinkisSQLStatement.scala @@ -17,21 +17,22 @@ package org.apache.linkis.ujes.jdbc -import org.apache.linkis.common.exception.ErrorException import 
org.apache.linkis.common.utils.{Logging, Utils} -import org.apache.linkis.ujes.client.request.JobExecuteAction -import org.apache.linkis.ujes.client.request.JobExecuteAction.EngineType +import org.apache.linkis.governance.common.entity.ExecutionNodeStatus +import org.apache.linkis.ujes.client.request.OpenLogAction import org.apache.linkis.ujes.client.response.JobExecuteResult import org.apache.linkis.ujes.jdbc.hook.JDBCDriverPreExecutionHook -import java.sql.{Connection, ResultSet, SQLWarning, Statement} +import org.apache.commons.lang3.StringUtils + +import java.sql.{Connection, ResultSet, SQLException, SQLWarning, Statement} +import java.util import java.util.concurrent.TimeUnit -import scala.collection.JavaConverters.mapAsJavaMapConverter import scala.concurrent.TimeoutException import scala.concurrent.duration.Duration -class UJESSQLStatement(private[jdbc] val ujesSQLConnection: UJESSQLConnection) +class LinkisSQLStatement(private[jdbc] val ujesSQLConnection: LinkisSQLConnection) extends Statement with Logging { @@ -42,15 +43,20 @@ class UJESSQLStatement(private[jdbc] val ujesSQLConnection: UJESSQLConnection) private var fetchSize = 100 private var queryTimeout = 0 + private var logPath: String = null + private var queryEnd = false + private var logFromLen = 0 + private val logSize = 100 + private[jdbc] def throwWhenClosed[T](op: => T): T = ujesSQLConnection.throwWhenClosed { - if (isClosed) throw new UJESSQLException(UJESSQLErrorCode.STATEMENT_CLOSED) + if (isClosed) throw new LinkisSQLException(LinkisSQLErrorCode.STATEMENT_CLOSED) else op } override def executeQuery(sql: String): UJESSQLResultSet = { - if (!execute(sql)) throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_NULL) + if (!execute(sql)) throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_NULL) resultSet } @@ -75,13 +81,13 @@ class UJESSQLStatement(private[jdbc] val ujesSQLConnection: UJESSQLConnection) } } - override def getMaxFieldSize: Int = throw new UJESSQLException( - 
UJESSQLErrorCode.NOSUPPORT_STATEMENT, + override def getMaxFieldSize: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, "getMaxFieldSize not supported" ) - override def setMaxFieldSize(max: Int): Unit = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, + override def setMaxFieldSize(max: Int): Unit = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, "setMaxFieldSize not supported" ) @@ -89,11 +95,12 @@ class UJESSQLStatement(private[jdbc] val ujesSQLConnection: UJESSQLConnection) override def setMaxRows(max: Int): Unit = this.maxRows = max - override def setEscapeProcessing(enable: Boolean): Unit = if (enable) - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, + override def setEscapeProcessing(enable: Boolean): Unit = if (enable) { + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, "setEscapeProcessing not supported" ) + } override def getQueryTimeout: Int = queryTimeout @@ -106,90 +113,75 @@ class UJESSQLStatement(private[jdbc] val ujesSQLConnection: UJESSQLConnection) override def clearWarnings(): Unit = {} override def setCursorName(name: String): Unit = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT, "setCursorName not supported") + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "setCursorName not supported" + ) - override def execute(sql: String): Boolean = Utils.tryCatch(throwWhenClosed { + override def execute(sql: String): Boolean = throwWhenClosed { var parsedSQL = sql JDBCDriverPreExecutionHook.getPreExecutionHooks.foreach { preExecution => - parsedSQL = preExecution.callPreExecutionHook(parsedSQL) + parsedSQL = preExecution.callPreExecutionHook(parsedSQL, !ujesSQLConnection.isTableau()) } logger.info(s"begin to execute sql ${parsedSQL}") - val action = JobExecuteAction - .builder() - .setEngineType(ujesSQLConnection.getEngineType) - .addExecuteCode(parsedSQL) - .setCreator(ujesSQLConnection.creator) 
- .setUser(ujesSQLConnection.user) - if (ujesSQLConnection.variableMap.nonEmpty) { - action.setVariableMap(ujesSQLConnection.variableMap.asJava) - } - jobExecuteResult = - Utils.tryCatch(ujesSQLConnection.ujesClient.execute(action.build())) { t: Throwable => - logger.error("UJESClient failed to get result", t) - null - } - // jobExecuteResult = ujesSQLConnection.ujesClient.execute(action.build()) queryEnd = false - var status = ujesSQLConnection.ujesClient.status(jobExecuteResult) - val atMost = - if (queryTimeout > 0) Duration(queryTimeout, TimeUnit.MILLISECONDS) else Duration.Inf - if (!status.isCompleted) Utils.tryThrow { - Utils.waitUntil( - () => { - status = ujesSQLConnection.ujesClient.status(jobExecuteResult) - status.isCompleted || closed - }, - atMost, - 100, - 10000 - ) - } { - case t: TimeoutException => - if (queryTimeout > 0) clearQuery() - new UJESSQLException(UJESSQLErrorCode.QUERY_TIMEOUT, "query has been timeout!").initCause(t) - case t => t - } - if (!closed) { + logPath = null + try { + jobExecuteResult = ujesSQLConnection.toSubmit(parsedSQL) + val atMost = + if (queryTimeout > 0) Duration(queryTimeout, TimeUnit.MILLISECONDS) else Duration.Inf var jobInfo = ujesSQLConnection.ujesClient.getJobInfo(jobExecuteResult) - if (status.isFailed) - throw new ErrorException( - jobInfo.getRequestPersistTask.getErrCode, - jobInfo.getRequestPersistTask.getErrDesc - ) - val jobInfoStatus = jobInfo.getJobStatus - if (!jobInfoStatus.equals("Succeed")) Utils.tryThrow { - Utils.waitUntil( - () => { - jobInfo = ujesSQLConnection.ujesClient.getJobInfo(jobExecuteResult) - val state = jobInfo.getJobStatus match { - case "Failed" | "Cancelled" | "Timeout" | "Succeed" => true - case _ => false - } - state || closed - }, - atMost, - 100, - 10000 + logPath = jobInfo.getRequestPersistTask.getLogPath + if (!ExecutionNodeStatus.isCompleted(ExecutionNodeStatus.valueOf(jobInfo.getJobStatus))) { + Utils.tryThrow { + Utils.waitUntil( + () => { + jobInfo = 
ujesSQLConnection.ujesClient.getJobInfo(jobExecuteResult) + ExecutionNodeStatus.isCompleted( + ExecutionNodeStatus.valueOf(jobInfo.getJobStatus) + ) || closed + }, + atMost, + 100, + 10000 + ) + } { + case t: TimeoutException => + if (queryTimeout > 0) clearQuery() + logPath = jobInfo.getRequestPersistTask.getLogPath + new LinkisSQLException(LinkisSQLErrorCode.QUERY_TIMEOUT, "query has been timeout!") + .initCause(t) + case t => t + } + } + logPath = jobInfo.getRequestPersistTask.getLogPath + if (!ExecutionNodeStatus.isSucceed(ExecutionNodeStatus.valueOf(jobInfo.getJobStatus))) { + throw new LinkisSQLException( + jobInfo.getRequestPersistTask.getErrDesc, + jobInfo.getRequestPersistTask.getErrCode.toString ) - } { - case t: TimeoutException => - if (queryTimeout > 0) clearQuery() - new UJESSQLException(UJESSQLErrorCode.QUERY_TIMEOUT, "query has been timeout!") - .initCause(t) - case t => t } + logger.info(s"end to execute sql ${parsedSQL}") val resultSetList = jobInfo.getResultSetList(ujesSQLConnection.ujesClient) logger.info(s"resultSetList is ${resultSetList.mkString(",")}") - queryEnd = true - if (resultSetList != null) { + if (resultSetList != null && resultSetList.nonEmpty) { resultSet = new UJESSQLResultSet(resultSetList, this, maxRows, fetchSize) true - } else false - } else throw new UJESSQLException(UJESSQLErrorCode.STATEMENT_CLOSED, "Statement is closed.") - }) { case t: Throwable => - logger.error("任务执行失败", t) - false + } else { + false + } + } catch { + case sqlException: SQLException => + throw sqlException + case throwable: Throwable => + val exception = + new LinkisSQLException(LinkisSQLErrorCode.UNKNOWN_ERROR, throwable.getMessage) + exception.initCause(throwable) + throw exception + } finally { + queryEnd = true + } } def getJobExcuteResult: JobExecuteResult = jobExecuteResult @@ -201,13 +193,12 @@ class UJESSQLStatement(private[jdbc] val ujesSQLConnection: UJESSQLConnection) override def getMoreResults: Boolean = false override def 
setFetchDirection(direction: Int): Unit = - throwWhenClosed( - if (direction != ResultSet.FETCH_FORWARD) - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, - "only FETCH_FORWARD is supported." - ) - ) + throwWhenClosed(if (direction != ResultSet.FETCH_FORWARD) { + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "only FETCH_FORWARD is supported." + ) + }) override def getFetchDirection: Int = throwWhenClosed(ResultSet.FETCH_FORWARD) @@ -215,88 +206,126 @@ class UJESSQLStatement(private[jdbc] val ujesSQLConnection: UJESSQLConnection) override def getFetchSize: Int = fetchSize - override def getResultSetConcurrency: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, + override def getResultSetConcurrency: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, "getResultSetConcurrency not supported." ) override def getResultSetType: Int = throwWhenClosed(ResultSet.TYPE_FORWARD_ONLY) override def addBatch(sql: String): Unit = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT, "addBatch not supported.") + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT, "addBatch not supported.") override def clearBatch(): Unit = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT, "clearBatch not supported.") + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "clearBatch not supported." + ) override def executeBatch(): Array[Int] = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT, "executeBatch not supported.") + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "executeBatch not supported." 
+ ) override def getConnection: Connection = throwWhenClosed(ujesSQLConnection) override def getMoreResults(current: Int): Boolean = false - override def getGeneratedKeys: ResultSet = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, + override def getGeneratedKeys: ResultSet = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, "getGeneratedKeys not supported." ) override def executeUpdate(sql: String, autoGeneratedKeys: Int): Int = - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, "executeUpdate with autoGeneratedKeys not supported." ) override def executeUpdate(sql: String, columnIndexes: Array[Int]): Int = - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, "executeUpdate with columnIndexes not supported." ) override def executeUpdate(sql: String, columnNames: Array[String]): Int = - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, "executeUpdate with columnNames not supported." ) override def execute(sql: String, autoGeneratedKeys: Int): Boolean = - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, "execute with autoGeneratedKeys not supported." ) override def execute(sql: String, columnIndexes: Array[Int]): Boolean = - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, "execute with columnIndexes not supported." ) override def execute(sql: String, columnNames: Array[String]): Boolean = - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, "execute with columnNames not supported." 
) - override def getResultSetHoldability: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, + override def getResultSetHoldability: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, "getResultSetHoldability not supported" ) override def isClosed: Boolean = closed override def setPoolable(poolable: Boolean): Unit = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT, "setPoolable not supported") + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, + "setPoolable not supported" + ) override def isPoolable: Boolean = false - override def closeOnCompletion(): Unit = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_STATEMENT, + override def closeOnCompletion(): Unit = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_STATEMENT, "closeOnCompletion not supported" ) override def isCloseOnCompletion: Boolean = false override def unwrap[T](iface: Class[T]): T = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT, "unwrap not supported") + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_STATEMENT, "unwrap not supported") override def isWrapperFor(iface: Class[_]): Boolean = false + + /** + * log[0] error log[1] warn log[2] info log[3] all (info + warn + error) + * @return + */ + def getAllLog(): Array[String] = { + if (queryEnd && StringUtils.isNotBlank(logPath)) { + val openLogAction = + OpenLogAction.newBuilder().setLogPath(logPath).setProxyUser(ujesSQLConnection.user).build() + ujesSQLConnection.ujesClient.openLog(openLogAction).getLog + } else { + Array.empty[String] + } + } + + /** + * log[0] error log[1] warn log[2] info log[3] all (info + warn + error) + * @return + */ + def getIncrementalLog(): util.List[String] = { + if (null != jobExecuteResult && !queryEnd) { + val logObj = ujesSQLConnection.ujesClient.log(jobExecuteResult, logFromLen, logSize) + logFromLen = logObj.fromLine + logObj.getLog + } else { + new util.ArrayList[String] + } + } 
+ } diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESClientFactory.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESClientFactory.scala index 60f4d88af5..517f8b07ae 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESClientFactory.scala +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESClientFactory.scala @@ -35,15 +35,27 @@ object UJESClientFactory extends Logging { def getUJESClient(props: Properties): UJESClient = { val host = props.getProperty(HOST) val port = props.getProperty(PORT) + val user = props.getProperty(USER) val serverUrl = if (StringUtils.isNotBlank(port)) s"http://$host:$port" else "http://" + host - if (ujesClients.containsKey(serverUrl)) ujesClients.get(serverUrl) - else - serverUrl.intern synchronized { - if (ujesClients.containsKey(serverUrl)) return ujesClients.get(serverUrl) + val uniqueKey = s"${serverUrl}_$user" + if (ujesClients.containsKey(uniqueKey)) { + logger.info("Clients with the same JDBC unique key({}) will get it directly", uniqueKey) + ujesClients.get(uniqueKey) + } else { + uniqueKey.intern synchronized { + if (ujesClients.containsKey(uniqueKey)) { + logger.info("Clients with the same JDBC unique key({}) will get it directly", uniqueKey) + return ujesClients.get(uniqueKey) + } + logger.info( + "The same Client does not exist for the JDBC unique key({}), a new Client will be created", + uniqueKey + ) val ujesClient = createUJESClient(serverUrl, props) - ujesClients.put(serverUrl, ujesClient) + ujesClients.put(uniqueKey, ujesClient) ujesClient } + } } private def createUJESClient(serverUrl: String, props: Properties): UJESClient = { diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLConnection.scala 
b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLConnection.scala deleted file mode 100644 index 0d8403c274..0000000000 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLConnection.scala +++ /dev/null @@ -1,340 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.ujes.jdbc - -import org.apache.linkis.common.utils.{Logging, Utils} -import org.apache.linkis.ujes.client.UJESClient -import org.apache.linkis.ujes.client.request.JobExecuteAction.EngineType -import org.apache.linkis.ujes.jdbc.UJESSQLDriverMain._ - -import org.apache.commons.lang3.StringUtils - -import java.{sql, util} -import java.sql.{ - Blob, - CallableStatement, - Clob, - Connection, - DatabaseMetaData, - NClob, - PreparedStatement, - ResultSet, - Savepoint, - SQLException, - SQLWarning, - SQLXML, - Statement, - Struct -} -import java.util.Properties -import java.util.concurrent.Executor - -import scala.collection.{mutable, JavaConversions} - -class UJESSQLConnection(private[jdbc] val ujesClient: UJESClient, props: Properties) - extends Connection - with Logging { - private[jdbc] var creator = "IDE" - - private[jdbc] val variableMap = { - val params = props.getProperty(PARAMS) - val map = new mutable.HashMap[String, AnyRef] - if (params != null) { - params.split(PARAM_SPLIT).map(_.split(KV_SPLIT)).foreach { - case Array(k, v) if k.startsWith(VARIABLE_HEADER) => - map += k.substring(VARIABLE_HEADER.length) -> v - case Array(CREATOR, v) => - creator = v - case _ => - } - } - map.toMap - } - - private[jdbc] val dbName = - if (StringUtils.isNotBlank(props.getProperty(DB_NAME))) props.getProperty(DB_NAME) - else "default" - - private val runningSQLStatements = new util.LinkedList[Statement] - - private var closed = false - - private var inited = false - - private[jdbc] val user = props.getProperty(USER) - - private[jdbc] val serverURL = props.getProperty("URL") - - private val engineTypeMap: mutable.HashMap[String, EngineType] = new mutable.HashMap() - - private[jdbc] def getEngineType: EngineType = { - if (engineTypeMap.isEmpty) { - engineTypeMap.put(EngineType.SPARK.toString, EngineType.SPARK) - engineTypeMap.put(EngineType.HIVE.toString, EngineType.HIVE) - engineTypeMap.put(EngineType.JDBC.toString, EngineType.JDBC) - 
engineTypeMap.put(EngineType.PYTHON.toString, EngineType.PYTHON) - engineTypeMap.put(EngineType.SHELL.toString, EngineType.SHELL) - engineTypeMap.put(EngineType.PRESTO.toString, EngineType.PRESTO) - } - val engineType: EngineType = EngineType.PRESTO - if (props.containsKey(PARAMS)) { - val params = props.getProperty(PARAMS) - if (params != null & params.length() > 0) { - params.split(PARAM_SPLIT).map(_.split(KV_SPLIT)).foreach { - case Array(k, v) if k.equals(UJESSQLDriver.ENGINE_TYPE) => return engineTypeMap(v) - case _ => - } - } - } - engineType - } - - private[jdbc] def throwWhenClosed[T](op: => T): T = - if (isClosed) throw new UJESSQLException(UJESSQLErrorCode.CONNECTION_CLOSED) - else op - - private def createStatementAndAdd[T <: Statement](op: => T): T = throwWhenClosed { - - val statement = op - runningSQLStatements.add(statement) - if (!inited) { - inited = true - Utils.tryAndWarn(statement.execute(s"USE $dbName")) - } - statement - } - - def getProps: Properties = props - - def removeStatement(statement: UJESSQLStatement): Unit = runningSQLStatements.remove(statement) - - override def createStatement(): Statement = createStatementAndAdd(new UJESSQLStatement(this)) - - override def prepareStatement(sql: String): UJESSQLPreparedStatement = { - val statement = createStatementAndAdd(new UJESSQLPreparedStatement(this, sql)) - statement.clearQuery() - statement - } - - override def createStatement(resultSetType: Int, resultSetConcurrency: Int): Statement = { - if (resultSetConcurrency != ResultSet.CONCUR_READ_ONLY) - throw new SQLException( - "Statement with resultset concurrency " + resultSetConcurrency + " is not supported", - "HYC00" - ) - if (resultSetType == ResultSet.TYPE_SCROLL_SENSITIVE) - throw new SQLException( - "Statement with resultset type " + resultSetType + " is not supported", - "HYC00" - ) - createStatementAndAdd(new UJESSQLStatement(this)) - } - - override def prepareStatement(sql: String, autoGeneratedKeys: Int): PreparedStatement = - 
prepareStatement(sql) - - override def prepareStatement( - sql: String, - resultSetType: Int, - resultSetConcurrency: Int - ): PreparedStatement = prepareStatement(sql) - - override def getMetaData: DatabaseMetaData = throwWhenClosed(new UJESSQLDatabaseMetaData(this)) - - override def close(): Unit = { - JavaConversions - .asScalaBuffer(runningSQLStatements) - .foreach(statement => Utils.tryQuietly(statement.close())) - closed = true - } - - override def isClosed: Boolean = closed - - override def setReadOnly(readOnly: Boolean): Unit = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "setReadOnly not supported") - - override def isReadOnly: Boolean = false - - override def setCatalog(catalog: String): Unit = throwWhenClosed() - - override def getCatalog: String = "" - - override def setTransactionIsolation(level: Int): Unit = {} - - override def getTransactionIsolation: Int = Connection.TRANSACTION_NONE - - override def getWarnings: SQLWarning = null - - override def clearWarnings(): Unit = {} - - override def setAutoCommit(autoCommit: Boolean): Unit = {} - - override def getAutoCommit: Boolean = true - - override def commit(): Unit = {} - - override def prepareCall(sql: String): CallableStatement = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "prepareCall not supported") - - override def rollback(): Unit = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "rollback not supported") - - override def nativeSQL(sql: String): String = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "nativeSQL not supported") - - override def prepareCall( - sql: String, - resultSetType: Int, - resultSetConcurrency: Int - ): CallableStatement = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "prepareCall not supported") - - override def getTypeMap: util.Map[String, Class[_]] = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "getTypeMap not supported") - - override def 
setTypeMap(map: util.Map[String, Class[_]]): Unit = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "setTypeMap not supported") - - override def setHoldability(holdability: Int): Unit = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_CONNECTION, - "setHoldability not supported" - ) - - override def getHoldability: Int = 0 - - override def setSavepoint(): Savepoint = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "setSavepoint not supported") - - override def setSavepoint(name: String): Savepoint = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "setSavepoint not supported") - - override def rollback(savepoint: Savepoint): Unit = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "rollback not supported") - - override def releaseSavepoint(savepoint: Savepoint): Unit = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_CONNECTION, - "releaseSavepoint not supported" - ) - - override def createStatement( - resultSetType: Int, - resultSetConcurrency: Int, - resultSetHoldability: Int - ): Statement = - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_CONNECTION, - "createStatement not supported" - ) - - override def prepareStatement( - sql: String, - resultSetType: Int, - resultSetConcurrency: Int, - resultSetHoldability: Int - ): PreparedStatement = - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_CONNECTION, - "prepareStatement not supported" - ) - - override def prepareCall( - sql: String, - resultSetType: Int, - resultSetConcurrency: Int, - resultSetHoldability: Int - ): CallableStatement = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "prepareCall not supported") - - override def prepareStatement(sql: String, columnIndexes: Array[Int]): PreparedStatement = - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_CONNECTION, - "prepareStatement not supported" - ) - - override def prepareStatement(sql: String, columnNames: 
Array[String]): PreparedStatement = - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_CONNECTION, - "prepareStatement not supported" - ) - - override def createClob(): Clob = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "createClob not supported") - - override def createBlob(): Blob = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "createBlob not supported") - - override def createNClob(): NClob = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "createNClob not supported") - - override def createSQLXML(): SQLXML = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "createSQLXML not supported") - - override def isValid(timeout: Int): Boolean = true - - override def setClientInfo(name: String, value: String): Unit = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "setClientInfo not supported") - - override def setClientInfo(properties: Properties): Unit = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "properties not supported") - - override def getClientInfo(name: String): String = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "getClientInfo not supported") - - override def getClientInfo: Properties = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "getClientInfo not supported") - - override def createArrayOf(typeName: String, elements: Array[AnyRef]): sql.Array = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "createArrayOf not supported") - - override def createStruct(typeName: String, attributes: Array[AnyRef]): Struct = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "createStruct not supported") - - override def setSchema(schema: String): Unit = throwWhenClosed { - if (StringUtils.isBlank(schema)) - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT, "schema is empty!") - createStatement().execute("use " + schema) - } - - override def 
getSchema: String = throwWhenClosed { - val resultSet = createStatement().executeQuery("SELECT current_database()") - if (!resultSet.next()) - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_STATEMENT, "Get schema failed!") - resultSet.getString(1) - } - - override def abort(executor: Executor): Unit = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "abort not supported") - - override def setNetworkTimeout(executor: Executor, milliseconds: Int): Unit = - throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_CONNECTION, - "setNetworkTimeout not supported" - ) - - override def getNetworkTimeout: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_CONNECTION, - "getNetworkTimeout not supported" - ) - - override def unwrap[T](iface: Class[T]): T = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "unwrap not supported") - - override def isWrapperFor(iface: Class[_]): Boolean = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_CONNECTION, "isWrapperFor not supported") - -} diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLDatabaseMetaData.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLDatabaseMetaData.scala index f2f0b9a106..25e94c5370 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLDatabaseMetaData.scala +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLDatabaseMetaData.scala @@ -29,7 +29,7 @@ import java.util import scala.collection.JavaConversions._ -class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) +class UJESSQLDatabaseMetaData(ujesSQLConnection: LinkisSQLConnection) extends DatabaseMetaData with Logging { override def allProceduresAreCallable(): Boolean = false @@ -41,7 +41,8 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) 
override def getUserName: String = if (ujesSQLConnection.getProps.containsKey("user")) ujesSQLConnection.getProps.getProperty("user") - else throw new UJESSQLException(UJESSQLErrorCode.PARAMS_NOT_FOUND, "Missing user information") + else + throw new LinkisSQLException(LinkisSQLErrorCode.PARAMS_NOT_FOUND, "Missing user information") override def isReadOnly: Boolean = false @@ -88,7 +89,10 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) override def getIdentifierQuoteString: String = " " override def getSQLKeywords: String = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA, "getSQLKeywords not supported") + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, + "getSQLKeywords not supported" + ) override def getNumericFunctions: String = "" @@ -162,8 +166,8 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) override def getCatalogTerm: String = "instance" - override def isCatalogAtStart: Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def isCatalogAtStart: Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "isCatalogAtStart not supported" ) @@ -219,103 +223,106 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) override def supportsOpenStatementsAcrossRollback(): Boolean = false - override def getMaxBinaryLiteralLength: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxBinaryLiteralLength: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxBinaryLiteralLength not supported" ) - override def getMaxCharLiteralLength: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxCharLiteralLength: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxCharLiteralLength not supported" ) override def getMaxColumnNameLength: Int = 128 - override def getMaxColumnsInGroupBy: 
Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxColumnsInGroupBy: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxColumnsInGroupBy not supported" ) - override def getMaxColumnsInIndex: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxColumnsInIndex: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxColumnsInIndex not supported" ) - override def getMaxColumnsInOrderBy: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxColumnsInOrderBy: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxColumnsInOrderBy not supported" ) - override def getMaxColumnsInSelect: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxColumnsInSelect: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxColumnsInSelect not supported" ) - override def getMaxColumnsInTable: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxColumnsInTable: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxColumnsInTable not supported" ) - override def getMaxConnections: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxConnections: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxConnections not supported" ) - override def getMaxCursorNameLength: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxCursorNameLength: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxCursorNameLength not supported" ) - override def getMaxIndexLength: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxIndexLength: Int = throw new LinkisSQLException( + 
LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxIndexLength not supported" ) - override def getMaxSchemaNameLength: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxSchemaNameLength: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxSchemaNameLength not supported" ) - override def getMaxProcedureNameLength: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxProcedureNameLength: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxProcedureNameLength not supported" ) - override def getMaxCatalogNameLength: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxCatalogNameLength: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxCatalogNameLength not supported" ) override def getMaxRowSize: Int = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA, "getMaxRowSize not supported") + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, + "getMaxRowSize not supported" + ) - override def doesMaxRowSizeIncludeBlobs(): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def doesMaxRowSizeIncludeBlobs(): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "doesMaxRowSizeIncludeBlobs not supported" ) - override def getMaxStatementLength: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxStatementLength: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxStatementLength not supported" ) - override def getMaxStatements: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxStatements: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxStatements not supported" ) - override def getMaxTableNameLength: Int = throw new 
UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxTableNameLength: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxTableNameLength not supported" ) - override def getMaxTablesInSelect: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxTablesInSelect: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxTablesInSelect not supported" ) - override def getMaxUserNameLength: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getMaxUserNameLength: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getMaxUserNameLength not supported" ) @@ -329,13 +336,13 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) override def supportsDataManipulationTransactionsOnly(): Boolean = false - override def dataDefinitionCausesTransactionCommit(): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def dataDefinitionCausesTransactionCommit(): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "dataDefinitionCausesTransactionCommit not supported" ) - override def dataDefinitionIgnoredInTransactions(): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def dataDefinitionIgnoredInTransactions(): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "dataDefinitionIgnoredInTransactions not supported" ) @@ -358,7 +365,9 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) tableNamePattern: String, types: Array[String] ): ResultSet = { - val resultCatalog = if (StringUtils.isNotBlank(catalog)) { + val resultCatalog = if (StringUtils.isNotBlank(schemaPattern)) { + schemaPattern + } else if (StringUtils.isNotBlank(catalog)) { catalog } else { s"${getUserName}_ind" @@ -374,11 +383,16 @@ class 
UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) if (table.get("isView").asInstanceOf[Boolean]) TableType.VIEW.name() else TableType.TABLE.name() val resultTable = new util.HashMap[String, String]() + val tableName = table.get("tableName").asInstanceOf[String] resultTable.put("catalog", resultCatalog) - resultTable.put("tableName", table.get("tableName").asInstanceOf[String]) + resultTable.put("tableName", tableName) resultTable.put("tableType", tableType) if (null == types || types.contains(tableType)) { - resultTables.add(resultTable) + if ( + StringUtils.isNotBlank(tableNamePattern) && tableNamePattern.equalsIgnoreCase(tableName) + ) { + resultTables.add(resultTable) + } } } val resultSet: LinkisMetaDataResultSet[util.Map[String, String]] = @@ -471,7 +485,9 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) tableNamePattern: String, columnNamePattern: String ): ResultSet = { - val resultCatalog = if (StringUtils.isNotBlank(catalog)) { + val resultCatalog = if (StringUtils.isNotBlank(schemaPattern)) { + schemaPattern + } else if (StringUtils.isNotBlank(catalog)) { catalog } else { s"${getUserName}_ind" @@ -670,48 +686,48 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) override def supportsResultSetConcurrency(`type`: Int, concurrency: Int): Boolean = false - override def ownUpdatesAreVisible(`type`: Int): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def ownUpdatesAreVisible(`type`: Int): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "ownUpdatesAreVisible not supported" ) - override def ownDeletesAreVisible(`type`: Int): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def ownDeletesAreVisible(`type`: Int): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "ownDeletesAreVisible not supported" ) - override def ownInsertsAreVisible(`type`: Int): Boolean = throw new 
UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def ownInsertsAreVisible(`type`: Int): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "ownInsertsAreVisible not supported" ) - override def othersUpdatesAreVisible(`type`: Int): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def othersUpdatesAreVisible(`type`: Int): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "othersUpdatesAreVisible not supported" ) - override def othersDeletesAreVisible(`type`: Int): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def othersDeletesAreVisible(`type`: Int): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "othersDeletesAreVisible not supported" ) - override def othersInsertsAreVisible(`type`: Int): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def othersInsertsAreVisible(`type`: Int): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "othersInsertsAreVisible not supported" ) - override def updatesAreDetected(`type`: Int): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def updatesAreDetected(`type`: Int): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "updatesAreDetected not supported" ) - override def deletesAreDetected(`type`: Int): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def deletesAreDetected(`type`: Int): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "deletesAreDetected not supported" ) - override def insertsAreDetected(`type`: Int): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def insertsAreDetected(`type`: Int): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, 
"insertsAreDetected not supported" ) @@ -739,14 +755,20 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) schemaPattern: String, typeNamePattern: String ): ResultSet = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA, "getSuperTypes not supported") + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, + "getSuperTypes not supported" + ) override def getSuperTables( catalog: String, schemaPattern: String, tableNamePattern: String ): ResultSet = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA, "getSuperTables not supported") + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, + "getSuperTables not supported" + ) override def getAttributes( catalog: String, @@ -754,12 +776,15 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) typeNamePattern: String, attributeNamePattern: String ): ResultSet = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA, "getAttributes not supported") + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, + "getAttributes not supported" + ) override def supportsResultSetHoldability(holdability: Int): Boolean = false - override def getResultSetHoldability: Int = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getResultSetHoldability: Int = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getResultSetHoldability not supported" ) @@ -773,15 +798,15 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) override def getSQLStateType: Int = 2 - override def locatorsUpdateCopy(): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def locatorsUpdateCopy(): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "locatorsUpdateCopy not supported" ) override def supportsStatementPooling(): Boolean = false - override def getRowIdLifetime: RowIdLifetime = throw new 
UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getRowIdLifetime: RowIdLifetime = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getRowIdLifetime not supported" ) @@ -794,13 +819,13 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) override def supportsStoredFunctionsUsingCallSyntax(): Boolean = false - override def autoCommitFailureClosesAllResultSets(): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def autoCommitFailureClosesAllResultSets(): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "autoCommitFailureClosesAllResultSets not supported" ) - override def getClientInfoProperties: ResultSet = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def getClientInfoProperties: ResultSet = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "getClientInfoProperties not supported" ) @@ -824,15 +849,18 @@ class UJESSQLDatabaseMetaData(ujesSQLConnection: UJESSQLConnection) columnNamePattern: String ): ResultSet = null - override def generatedKeyAlwaysReturned(): Boolean = throw new UJESSQLException( - UJESSQLErrorCode.NOSUPPORT_METADATA, + override def generatedKeyAlwaysReturned(): Boolean = throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, "generatedKeyAlwaysReturned not supported" ) override def unwrap[T](iface: Class[T]): T = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA, "unwrap not supported") + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_METADATA, "unwrap not supported") override def isWrapperFor(iface: Class[_]): Boolean = - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA, "isWrapperFor not supported") + throw new LinkisSQLException( + LinkisSQLErrorCode.NOSUPPORT_METADATA, + "isWrapperFor not supported" + ) } diff --git 
a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLDriverMain.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLDriverMain.scala index 783713cf40..ab2f6dda10 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLDriverMain.scala +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLDriverMain.scala @@ -29,10 +29,10 @@ import java.sql.{ DriverPropertyInfo, SQLFeatureNotSupportedException } -import java.util.Properties +import java.util.{Locale, Properties} import java.util.logging.Logger -import scala.collection.JavaConversions +import scala.collection.JavaConverters._ class UJESSQLDriverMain extends Driver with Logging { @@ -41,8 +41,10 @@ class UJESSQLDriverMain extends Driver with Logging { props.putAll(parseURL(url)) logger.info(s"input url:$url, properties:$properties") val ujesClient = UJESClientFactory.getUJESClient(props) - new UJESSQLConnection(ujesClient, props) - } else throw new UJESSQLException(UJESSQLErrorCode.BAD_URL, "bad url: " + url) + new LinkisSQLConnection(ujesClient, props) + } else { + null + } override def acceptsURL(url: String): Boolean = url.startsWith(URL_PREFIX) @@ -70,20 +72,22 @@ class UJESSQLDriverMain extends Driver with Logging { case Array(TOKEN_VALUE, value) => props.setProperty(TOKEN_VALUE, value) false - case Array(LIMIT, value) => - props.setProperty(LIMIT, value) - UJESSQLDriverMain.LIMIT_ENABLED = value.toLowerCase() + case Array(FIXED_SESSION, value) => + props.setProperty(FIXED_SESSION, value) false case Array(key, _) => if (StringUtils.isBlank(key)) { - throw new UJESSQLException(UJESSQLErrorCode.BAD_URL, "bad url for params: " + url) + throw new LinkisSQLException( + LinkisSQLErrorCode.BAD_URL, + "bad url for params: " + url + ) } else true case _ => - throw new UJESSQLException(UJESSQLErrorCode.BAD_URL, "bad 
url for params: " + url) + throw new LinkisSQLException(LinkisSQLErrorCode.BAD_URL, "bad url for params: " + url) } props.setProperty(PARAMS, kvs.map(_.mkString(KV_SPLIT)).mkString(PARAM_SPLIT)) } - case _ => throw new UJESSQLException(UJESSQLErrorCode.BAD_URL, "bad url: " + url) + case _ => throw new LinkisSQLException(LinkisSQLErrorCode.BAD_URL, "bad url: " + url) } props } @@ -133,8 +137,7 @@ object UJESSQLDriverMain { val TOKEN_VALUE = UJESSQLDriver.TOKEN_VALUE val PASSWORD = UJESSQLDriver.PASSWORD val TABLEAU_SERVER = UJESSQLDriver.TABLEAU_SERVER - val LIMIT = UJESSQLDriver.LIMIT - var LIMIT_ENABLED = UJESSQLDriver.LIMIT_ENABLED + val FIXED_SESSION = UJESSQLDriver.FIXED_SESSION val VERSION = UJESSQLDriver.VERSION val DEFAULT_VERSION = UJESSQLDriver.DEFAULT_VERSION @@ -144,14 +147,15 @@ object UJESSQLDriverMain { val ENABLE_LOADBALANCER = UJESSQLDriver.ENABLE_LOADBALANCER val CREATOR = UJESSQLDriver.CREATOR + val TABLEAU = UJESSQLDriver.TABLEAU + val VARIABLE_HEADER = UJESSQLDriver.VARIABLE_HEADER def getConnectionParams( connectionParams: String, variableMap: java.util.Map[String, Any] ): String = { - val variables = JavaConversions - .mapAsScalaMap(variableMap) + val variables = variableMap.asScala .map(kv => VARIABLE_HEADER + kv._1 + KV_SPLIT + kv._2) .mkString(PARAM_SPLIT) if (StringUtils.isNotBlank(connectionParams)) connectionParams + PARAM_SPLIT + variables @@ -179,17 +183,20 @@ object UJESSQLDriverMain { ): String = { val sb = new StringBuilder if (StringUtils.isNotBlank(version)) sb.append(VERSION).append(KV_SPLIT).append(version) - if (maxConnectionSize > 0) + if (maxConnectionSize > 0) { sb.append(PARAM_SPLIT).append(MAX_CONNECTION_SIZE).append(KV_SPLIT).append(maxConnectionSize) - if (readTimeout > 0) + } + if (readTimeout > 0) { sb.append(PARAM_SPLIT).append(READ_TIMEOUT).append(KV_SPLIT).append(readTimeout) + } if (enableDiscovery) { sb.append(PARAM_SPLIT).append(ENABLE_DISCOVERY).append(KV_SPLIT).append(enableDiscovery) - if (enableLoadBalancer) 
+ if (enableLoadBalancer) { sb.append(PARAM_SPLIT) .append(ENABLE_LOADBALANCER) .append(KV_SPLIT) .append(enableLoadBalancer) + } } if (sb.startsWith(PARAM_SPLIT)) sb.toString.substring(PARAM_SPLIT.length) else sb.toString } diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLException.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLException.scala deleted file mode 100644 index 25db6f9381..0000000000 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLException.scala +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.ujes.jdbc - -import org.apache.linkis.common.exception.ErrorException - -class UJESSQLException(errorCode: UJESSQLErrorCode) - extends ErrorException(errorCode.getCode, errorCode.getMsg) { - - def this(errorCode: UJESSQLErrorCode, msg: String) { - this(errorCode) - setErrCode(errorCode.getCode) - setDesc(msg) - } - - /** - * add to deal with errorinfo derived from jobInfo - * @param errorCode - * @param msg - */ - def this(errorCode: Int, msg: String) { - this(UJESSQLErrorCode.ERRORINFO_FROM_JOBINFO) - setDesc(msg) - setErrCode(errorCode) - } - -} diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLResultSet.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLResultSet.scala index b8cf1b23b1..0ed47925c6 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLResultSet.scala +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLResultSet.scala @@ -42,7 +42,7 @@ import java.sql.{ Time, Timestamp } -import java.util.Calendar +import java.util.{Calendar, Locale} import org.joda.time.DateTimeZone import org.joda.time.format.{ @@ -54,7 +54,7 @@ import org.joda.time.format.{ class UJESSQLResultSet( resultSetList: Array[String], - ujesStatement: UJESSQLStatement, + ujesStatement: LinkisSQLStatement, maxRows: Int, fetchSize: Int ) extends ResultSet @@ -75,10 +75,10 @@ class UJESSQLResultSet( private val pageSize: Int = 5000 private var path: String = _ private var metaData: util.List[util.Map[String, String]] = _ - private val statement: UJESSQLStatement = ujesStatement + private val statement: LinkisSQLStatement = ujesStatement - private val connection: UJESSQLConnection = - ujesStatement.getConnection.asInstanceOf[UJESSQLConnection] + private val connection: LinkisSQLConnection = + 
ujesStatement.getConnection.asInstanceOf[LinkisSQLConnection] private var valueWasNull: Boolean = false private var warningChain: SQLWarning = _ @@ -96,6 +96,10 @@ class UJESSQLResultSet( .toFormatter .withOffsetParsed + private val STRING_TYPE = "string" + + private val NULL_VALUE = "NULL" + private def getResultSetPath(resultSetList: Array[String]): String = { if (resultSetList.length > 0) { resultSetList(resultSetList.length - 1) @@ -189,7 +193,7 @@ class UJESSQLResultSet( if (metaData == null) init() currentRowCursor += 1 if (null == resultSetRow || currentRowCursor > resultSetRow.size() - 1) { - if (UJESSQLDriverMain.LIMIT_ENABLED.equals("false") && !isCompleted) { + if (!isCompleted) { updateResultSet() if (isCompleted) { return false @@ -229,12 +233,16 @@ class UJESSQLResultSet( } private def evaluate(dataType: String, value: String): Any = { + if (value == null || value.equals("null") || value.equals("NULL") || value.equals("Null")) { - value + dataType.toLowerCase(Locale.getDefault) match { + case "string" | "char" | "varchar" | "nvarchar" => value + case _ => null + } } else { - dataType.toLowerCase match { - case null => throw new UJESSQLException(UJESSQLErrorCode.METADATA_EMPTY) - case "string" => value.toString + dataType.toLowerCase(Locale.getDefault) match { + case null => throw new LinkisSQLException(LinkisSQLErrorCode.METADATA_EMPTY) + case "char" | "varchar" | "nvarchar" | "string" => value case "short" => value.toShort case "int" => value.toInt case "long" => value.toLong @@ -242,18 +250,15 @@ class UJESSQLResultSet( case "double" => value.toDouble case "boolean" => value.toBoolean case "byte" => value.toByte - case "char" => value.toString - case "timestamp" => value.toString - case "varchar" => value.toString - case "nvarchar" => value.toString - case "date" => value.toString + case "timestamp" => value + case "date" => value case "bigint" => value.toLong case "decimal" => value.toDouble case "array" => value.toArray case "map" => value case _ 
=> - throw new UJESSQLException( - UJESSQLErrorCode.PREPARESTATEMENT_TYPEERROR, + throw new LinkisSQLException( + LinkisSQLErrorCode.PREPARESTATEMENT_TYPEERROR, s"Can't infer the SQL type to use for an instance of ${dataType}. Use getObject() with an explicit Types value to specify the type to use" ) } @@ -262,15 +267,15 @@ class UJESSQLResultSet( private def getColumnValue(columnIndex: Int): Any = { if (currentRow == null) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "No row found.") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "No row found.") } else if (currentRow.size() <= 0) { - throw new UJESSQLException( - UJESSQLErrorCode.RESULTSET_ROWERROR, + throw new LinkisSQLException( + LinkisSQLErrorCode.RESULTSET_ROWERROR, "RowSet does not contain any columns!" ) } else if (columnIndex > currentRow.size()) { - throw new UJESSQLException( - UJESSQLErrorCode.RESULTSET_ROWERROR, + throw new LinkisSQLException( + LinkisSQLErrorCode.RESULTSET_ROWERROR, s" Invalid columnIndex: ${columnIndex}" ) } else { @@ -285,7 +290,7 @@ class UJESSQLResultSet( override def getString(columnIndex: Int): String = { val any = getColumnValue(columnIndex) if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else { any match { case c: Character => Character.toString(c) @@ -297,7 +302,7 @@ class UJESSQLResultSet( override def getBoolean(columnIndex: Int): Boolean = { val any = getColumnValue(columnIndex) if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else { any match { case s: String => @@ -313,7 +318,7 @@ class UJESSQLResultSet( override def getByte(columnIndex: Int): Byte = { val any = getColumnValue(columnIndex) if (wasNull()) { - throw new 
UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else { any.asInstanceOf[Byte] } @@ -322,7 +327,7 @@ class UJESSQLResultSet( override def getShort(columnIndex: Int): Short = { val any = getColumnValue(columnIndex) if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else { any.asInstanceOf[Short] } @@ -331,7 +336,7 @@ class UJESSQLResultSet( override def getInt(columnIndex: Int): Int = { val any = getColumnValue(columnIndex) if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else { any match { case i: Integer => i.asInstanceOf[Int] @@ -344,7 +349,7 @@ class UJESSQLResultSet( override def getLong(columnIndex: Int): Long = { val any = getColumnValue(columnIndex) if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else { any match { case i: Integer => i.longValue() @@ -357,7 +362,7 @@ class UJESSQLResultSet( override def getFloat(columnIndex: Int): Float = { val any = getColumnValue(columnIndex) if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else { any.asInstanceOf[Float] } @@ -366,7 +371,7 @@ class UJESSQLResultSet( override def getDouble(columnIndex: Int): Double = { val any = getColumnValue(columnIndex) if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else { any match { case _: String => 
0.0d @@ -379,7 +384,7 @@ class UJESSQLResultSet( val mc = new MathContext(scale) val any = getColumnValue(columnIndex) if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else { any match { case double: Double => new java.math.BigDecimal(double).round(mc) @@ -393,7 +398,7 @@ class UJESSQLResultSet( override def getBytes(columnIndex: Int): Array[Byte] = { val any = getColumnValue(columnIndex) if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else { any.asInstanceOf[Array[Byte]] } @@ -404,35 +409,35 @@ class UJESSQLResultSet( val any = getColumnValue(columnIndex) logger.info(s"the value of Date is $any") if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else { any.asInstanceOf[Date] } } override def getTime(columnIndex: Int): Time = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getTimestamp(columnIndex: Int): Timestamp = { val any = getColumnValue(columnIndex) if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else { any.asInstanceOf[Timestamp] } } override def getAsciiStream(columnIndex: Int): InputStream = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getUnicodeStream(columnIndex: Int): InputStream = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new 
LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getBinaryStream(columnIndex: Int): InputStream = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getString(columnLabel: String): String = { @@ -480,7 +485,7 @@ class UJESSQLResultSet( } override def getTime(columnLabel: String): Time = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getTimestamp(columnLabel: String): Timestamp = { @@ -488,15 +493,15 @@ class UJESSQLResultSet( } override def getAsciiStream(columnLabel: String): InputStream = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getUnicodeStream(columnLabel: String): InputStream = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getBinaryStream(columnLabel: String): InputStream = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getWarnings: SQLWarning = { @@ -508,7 +513,7 @@ class UJESSQLResultSet( } override def getCursorName: String = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getMetaData: UJESSQLResultSetMetaData = { @@ -518,11 +523,7 @@ class UJESSQLResultSet( override def getObject(columnIndex: Int): Object = { val any = getColumnValue(columnIndex) - if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") - } else { - any.asInstanceOf[Object] - } + any.asInstanceOf[Object] } override def getObject(columnLabel: String): Object = { @@ -542,19 +543,19 @@ 
class UJESSQLResultSet( } } if (columnIndex == -1) { - throw new UJESSQLException( - UJESSQLErrorCode.RESULTSET_ROWERROR, + throw new LinkisSQLException( + LinkisSQLErrorCode.RESULTSET_ROWERROR, s"can not find column: ${columnLabel}" ) } else columnIndex } override def getCharacterStream(columnIndex: Int): Reader = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getCharacterStream(columnLabel: String): Reader = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getBigDecimal(columnIndex: Int): java.math.BigDecimal = { @@ -567,31 +568,31 @@ class UJESSQLResultSet( override def isBeforeFirst: Boolean = { if (resultSetRow == null) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_NULL) + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_NULL) } else currentRowCursor == -1 } override def isAfterLast: Boolean = { if (resultSetRow == null) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_NULL) + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_NULL) } else currentRowCursor > resultSetRow.size() - 1 } override def isFirst: Boolean = { if (resultSetRow == null) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_NULL) + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_NULL) } else currentRowCursor == 0 } override def isLast: Boolean = { if (resultSetRow == null) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_NULL) + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_NULL) } else currentRowCursor == resultSetRow.size() - 1 } override def beforeFirst(): Unit = { if (resultSetRow == null) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_NULL) + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_NULL) } else { currentRowCursor = -1 updateCurrentRow(currentRowCursor) @@ -600,7 +601,7 @@ 
class UJESSQLResultSet( override def afterLast(): Unit = { if (resultSetRow == null) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_NULL) + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_NULL) } else { currentRowCursor = resultSetRow.size() updateCurrentRow(currentRowCursor) @@ -627,7 +628,7 @@ class UJESSQLResultSet( override def getRow: Int = { if (resultSetRow == null) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_NULL) + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_NULL) } else { currentRowCursor + 1 } @@ -635,10 +636,10 @@ class UJESSQLResultSet( override def absolute(row: Int): Boolean = { if (resultSetRow == null) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_NULL) + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_NULL) } else if (row > resultSetRow.size()) { - throw new UJESSQLException( - UJESSQLErrorCode.RESULTSET_ROWERROR, + throw new LinkisSQLException( + LinkisSQLErrorCode.RESULTSET_ROWERROR, "The specified number of rows is greater than the maximum number of rows" ) } else { @@ -654,10 +655,10 @@ class UJESSQLResultSet( override def relative(rows: Int): Boolean = { if (resultSetRow == null) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_NULL) + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_NULL) } else if (rows > resultSetRow.size()) { - throw new UJESSQLException( - UJESSQLErrorCode.RESULTSET_ROWERROR, + throw new LinkisSQLException( + LinkisSQLErrorCode.RESULTSET_ROWERROR, "The specified number of rows is greater than the maximum number of rows" ) } else { @@ -679,11 +680,11 @@ class UJESSQLResultSet( } override def setFetchDirection(direction: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getFetchDirection: Int = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new 
LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def setFetchSize(rows: Int): Unit = { @@ -703,248 +704,248 @@ class UJESSQLResultSet( } override def rowUpdated(): Boolean = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def rowInserted(): Boolean = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def rowDeleted(): Boolean = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNull(columnIndex: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBoolean(columnIndex: Int, x: Boolean): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateByte(columnIndex: Int, x: Byte): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateShort(columnIndex: Int, x: Short): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateInt(columnIndex: Int, x: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateLong(columnIndex: Int, x: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateFloat(columnIndex: Int, x: Float): Unit = { - throw new 
UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateDouble(columnIndex: Int, x: Double): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBigDecimal(columnIndex: Int, x: java.math.BigDecimal): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateString(columnIndex: Int, x: String): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBytes(columnIndex: Int, x: Array[Byte]): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateDate(columnIndex: Int, x: Date): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateTime(columnIndex: Int, x: Time): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateTimestamp(columnIndex: Int, x: Timestamp): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateAsciiStream(columnIndex: Int, x: InputStream, length: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBinaryStream(columnIndex: Int, x: InputStream, length: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new 
LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateCharacterStream(columnIndex: Int, x: Reader, length: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateObject(columnIndex: Int, x: scala.Any, scaleOrLength: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateObject(columnIndex: Int, x: scala.Any): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNull(columnLabel: String): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBoolean(columnLabel: String, x: Boolean): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateByte(columnLabel: String, x: Byte): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateShort(columnLabel: String, x: Short): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateInt(columnLabel: String, x: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateLong(columnLabel: String, x: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateFloat(columnLabel: String, x: 
Float): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateDouble(columnLabel: String, x: Double): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBigDecimal(columnLabel: String, x: java.math.BigDecimal): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateString(columnLabel: String, x: String): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBytes(columnLabel: String, x: Array[Byte]): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateDate(columnLabel: String, x: Date): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateTime(columnLabel: String, x: Time): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateTimestamp(columnLabel: String, x: Timestamp): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateAsciiStream(columnLabel: String, x: InputStream, length: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBinaryStream(columnLabel: String, x: InputStream, length: Int): Unit = { - throw new 
UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateCharacterStream(columnLabel: String, reader: Reader, length: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateObject(columnLabel: String, x: scala.Any, scaleOrLength: Int): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateObject(columnLabel: String, x: scala.Any): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def insertRow(): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateRow(): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def deleteRow(): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def refreshRow(): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def cancelRowUpdates(): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def moveToInsertRow(): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def moveToCurrentRow(): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new 
LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getStatement: Statement = { if (statement != null && !hasClosed) { statement.asInstanceOf[Statement] - } else throw new UJESSQLException(UJESSQLErrorCode.STATEMENT_CLOSED) + } else throw new LinkisSQLException(LinkisSQLErrorCode.STATEMENT_CLOSED) } override def getObject(columnIndex: Int, map: util.Map[String, Class[_]]): AnyRef = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getRef(columnIndex: Int): Ref = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getBlob(columnIndex: Int): Blob = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getClob(columnIndex: Int): Clob = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getArray(columnIndex: Int): sql.Array = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getObject(columnLabel: String, map: util.Map[String, Class[_]]): AnyRef = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getRef(columnLabel: String): Ref = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getBlob(columnLabel: String): Blob = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getClob(columnLabel: String): Clob = { - throw new 
UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getArray(columnLabel: String): sql.Array = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } private def getDate(columnIndex: Int, localTimeZone: DateTimeZone): Date = { val value = getColumnValue(columnIndex) logger.info(s"the value of value is $value and the value of localTimeZone is $localTimeZone") if (wasNull()) { - throw new UJESSQLException(UJESSQLErrorCode.RESULTSET_ROWERROR, "Type is null") + throw new LinkisSQLException(LinkisSQLErrorCode.RESULTSET_ROWERROR, "Type is null") } else new Date(DATE_FORMATTER.withZone(localTimeZone).parseMillis(String.valueOf(value))); } @@ -958,11 +959,11 @@ class UJESSQLResultSet( } override def getTime(columnIndex: Int, cal: Calendar): Time = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getTime(columnLabel: String, cal: Calendar): Time = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } private def getTimestamp(columnIndex: Int, localTimeZone: DateTimeZone): Timestamp = { @@ -970,8 +971,9 @@ class UJESSQLResultSet( logger.info(s"the value of value is $value and the value of localTimeZone is $localTimeZone") if (wasNull()) { null - } else + } else { new Timestamp(TIMESTAMP_FORMATTER.withZone(localTimeZone).parseMillis(String.valueOf(value))) + } } override def getTimestamp(columnIndex: Int, cal: Calendar): Timestamp = { @@ -986,63 +988,63 @@ class UJESSQLResultSet( } override def getURL(columnIndex: Int): URL = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getURL(columnLabel: String): URL = 
{ - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateRef(columnIndex: Int, x: Ref): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateRef(columnLabel: String, x: Ref): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBlob(columnIndex: Int, x: Blob): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBlob(columnLabel: String, x: Blob): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateClob(columnIndex: Int, x: Clob): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateClob(columnLabel: String, x: Clob): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateArray(columnIndex: Int, x: sql.Array): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateArray(columnLabel: String, x: sql.Array): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getRowId(columnIndex: Int): RowId = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getRowId(columnLabel: 
String): RowId = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateRowId(columnIndex: Int, x: RowId): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateRowId(columnLabel: String, x: RowId): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getHoldability: Int = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def isClosed: Boolean = { @@ -1050,187 +1052,187 @@ class UJESSQLResultSet( } override def updateNString(columnIndex: Int, nString: String): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNString(columnLabel: String, nString: String): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNClob(columnIndex: Int, nClob: NClob): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNClob(columnLabel: String, nClob: NClob): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getNClob(columnIndex: Int): NClob = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getNClob(columnLabel: String): NClob = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new 
LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getSQLXML(columnIndex: Int): SQLXML = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getSQLXML(columnLabel: String): SQLXML = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateSQLXML(columnIndex: Int, xmlObject: SQLXML): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateSQLXML(columnLabel: String, xmlObject: SQLXML): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getNString(columnIndex: Int): String = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getNString(columnLabel: String): String = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getNCharacterStream(columnIndex: Int): Reader = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getNCharacterStream(columnLabel: String): Reader = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNCharacterStream(columnIndex: Int, x: Reader, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNCharacterStream(columnLabel: String, reader: Reader, length: Long): 
Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateAsciiStream(columnIndex: Int, x: InputStream, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBinaryStream(columnIndex: Int, x: InputStream, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateCharacterStream(columnIndex: Int, x: Reader, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateAsciiStream(columnLabel: String, x: InputStream, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBinaryStream(columnLabel: String, x: InputStream, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateCharacterStream(columnLabel: String, reader: Reader, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBlob(columnIndex: Int, inputStream: InputStream, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBlob(columnLabel: String, inputStream: InputStream, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new 
LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateClob(columnIndex: Int, reader: Reader, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateClob(columnLabel: String, reader: Reader, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNClob(columnIndex: Int, reader: Reader, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNClob(columnLabel: String, reader: Reader, length: Long): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNCharacterStream(columnIndex: Int, x: Reader): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNCharacterStream(columnLabel: String, reader: Reader): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateAsciiStream(columnIndex: Int, x: InputStream): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBinaryStream(columnIndex: Int, x: InputStream): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateCharacterStream(columnIndex: Int, x: Reader): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new 
LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateAsciiStream(columnLabel: String, x: InputStream): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBinaryStream(columnLabel: String, x: InputStream): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateCharacterStream(columnLabel: String, reader: Reader): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBlob(columnIndex: Int, inputStream: InputStream): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateBlob(columnLabel: String, inputStream: InputStream): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateClob(columnIndex: Int, reader: Reader): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateClob(columnLabel: String, reader: Reader): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNClob(columnIndex: Int, reader: Reader): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def updateNClob(columnLabel: String, reader: Reader): Unit = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } 
override def getObject[T](columnIndex: Int, `type`: Class[T]): T = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def getObject[T](columnLabel: String, `type`: Class[T]): T = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def unwrap[T](iface: Class[T]): T = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } override def isWrapperFor(iface: Class[_]): Boolean = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_RESULTSET) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_RESULTSET) } } diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLResultSetMetaData.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLResultSetMetaData.scala index e51309e885..eaeb1b25d8 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLResultSetMetaData.scala +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLResultSetMetaData.scala @@ -33,19 +33,19 @@ class UJESSQLResultSetMetaData extends ResultSetMetaData with Logging { private[jdbc] def setColumnNameProperties(column: Int, columnName: String): Unit = { if (column != null && columnName != null) { columnNameProperties.put(column, columnName) - } else throw new UJESSQLException(UJESSQLErrorCode.METADATA_EMPTY) + } else throw new LinkisSQLException(LinkisSQLErrorCode.METADATA_EMPTY) } private[jdbc] def setDataTypeProperties(column: Int, columnName: String): Unit = { if (column != null && columnName != null) { dataTypeProperties.put(column, columnName) - } else throw new 
UJESSQLException(UJESSQLErrorCode.METADATA_EMPTY) + } else throw new LinkisSQLException(LinkisSQLErrorCode.METADATA_EMPTY) } private[jdbc] def setCommentPropreties(column: Int, columnName: String): Unit = { if (column != null && columnName != null) { commentProperties.put(column, columnName) - } else throw new UJESSQLException(UJESSQLErrorCode.METADATA_EMPTY) + } else throw new LinkisSQLException(LinkisSQLErrorCode.METADATA_EMPTY) } override def getColumnCount: Int = { @@ -53,7 +53,7 @@ class UJESSQLResultSetMetaData extends ResultSetMetaData with Logging { } override def isAutoIncrement(column: Int): Boolean = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_METADATA) } override def isCaseSensitive(column: Int): Boolean = true @@ -74,7 +74,7 @@ class UJESSQLResultSetMetaData extends ResultSetMetaData with Logging { override def getColumnLabel(column: Int): String = { if (columnNameProperties.get(column) == null) { - throw new UJESSQLException(UJESSQLErrorCode.METADATA_EMPTY) + throw new LinkisSQLException(LinkisSQLErrorCode.METADATA_EMPTY) } else columnNameProperties.get(column) } @@ -83,7 +83,7 @@ class UJESSQLResultSetMetaData extends ResultSetMetaData with Logging { } override def getSchemaName(column: Int): String = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_METADATA) } override def getPrecision(column: Int): Int = { @@ -102,11 +102,11 @@ class UJESSQLResultSetMetaData extends ResultSetMetaData with Logging { } override def getTableName(column: Int): String = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_METADATA) } override def getCatalogName(column: Int): String = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_METADATA) } override def 
getColumnType(column: Int): Int = { @@ -115,7 +115,7 @@ class UJESSQLResultSetMetaData extends ResultSetMetaData with Logging { override def getColumnTypeName(column: Int): String = { if (dataTypeProperties.get(column) == null) { - throw new UJESSQLException(UJESSQLErrorCode.METADATA_EMPTY) + throw new LinkisSQLException(LinkisSQLErrorCode.METADATA_EMPTY) } else dataTypeProperties.get(column) } @@ -128,19 +128,19 @@ class UJESSQLResultSetMetaData extends ResultSetMetaData with Logging { } override def isDefinitelyWritable(column: Int): Boolean = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_METADATA) } override def getColumnClassName(column: Int): String = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_METADATA) } override def unwrap[T](iface: Class[T]): T = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_METADATA) } override def isWrapperFor(iface: Class[_]): Boolean = { - throw new UJESSQLException(UJESSQLErrorCode.NOSUPPORT_METADATA) + throw new LinkisSQLException(LinkisSQLErrorCode.NOSUPPORT_METADATA) } protected def toZeroIndex(column: Int): Int = { diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLTypeParser.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLTypeParser.scala index ba4cd3878f..d8de812a1b 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLTypeParser.scala +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/UJESSQLTypeParser.scala @@ -18,12 +18,14 @@ package org.apache.linkis.ujes.jdbc import java.sql.{SQLException, Timestamp, Types} +import java.util.Locale object UJESSQLTypeParser { 
def parserFromName(typeName: String): Int = { - typeName.toLowerCase match { - case null => throw new UJESSQLException(UJESSQLErrorCode.METADATA_EMPTY) + val typeNameLowerCase = typeName.toLowerCase(Locale.getDefault()) + typeName.toLowerCase() match { + case null => throw new LinkisSQLException(LinkisSQLErrorCode.METADATA_EMPTY) case "string" => Types.NVARCHAR case "short" => Types.SMALLINT case "int" => Types.INTEGER @@ -43,7 +45,12 @@ object UJESSQLTypeParser { case "bigint" => Types.BIGINT case "array" => Types.ARRAY case "map" => Types.JAVA_OBJECT - case _ => throw new SQLException(s"parameter type error,Type:$typeName") + case _ => + if (typeNameLowerCase.startsWith("decimal")) { + Types.DECIMAL + } else { + Types.NVARCHAR + } } } @@ -60,7 +67,7 @@ object UJESSQLTypeParser { case _: Char => Types.CHAR case _: BigDecimal => Types.DECIMAL case _: Timestamp => Types.TIMESTAMP - case _ => throw new UJESSQLException(UJESSQLErrorCode.PREPARESTATEMENT_TYPEERROR) + case _ => throw new LinkisSQLException(LinkisSQLErrorCode.PREPARESTATEMENT_TYPEERROR) } } @@ -80,7 +87,7 @@ object UJESSQLTypeParser { case Types.VARCHAR => "varchar" case Types.NVARCHAR => "string" case Types.DATE => "date" - case _ => throw new UJESSQLException(UJESSQLErrorCode.PREPARESTATEMENT_TYPEERROR) + case _ => throw new LinkisSQLException(LinkisSQLErrorCode.PREPARESTATEMENT_TYPEERROR) } } diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/JDBCDriverPreExecutionHook.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/JDBCDriverPreExecutionHook.scala index 038ff38712..c7de7d3734 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/JDBCDriverPreExecutionHook.scala +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/JDBCDriverPreExecutionHook.scala @@ -24,7 +24,7 @@ import 
scala.collection.mutable.ArrayBuffer trait JDBCDriverPreExecutionHook { - def callPreExecutionHook(sql: String): String + def callPreExecutionHook(sql: String, skip: Boolean): String } @@ -34,8 +34,7 @@ object JDBCDriverPreExecutionHook extends Logging { val hooks = new ArrayBuffer[JDBCDriverPreExecutionHook]() CommonVars( "wds.linkis.jdbc.pre.hook.class", - "org.apache.linkis.ujes.jdbc.hook.impl.TableauPreExecutionHook," + - "org.apache.linkis.ujes.jdbc.hook.impl.NoLimitExecutionHook" + "org.apache.linkis.ujes.jdbc.hook.impl.TableauPreExecutionHook" ).getValue.split(",") foreach { hookStr => Utils.tryCatch { val clazz = Class.forName(hookStr.trim) @@ -51,5 +50,5 @@ object JDBCDriverPreExecutionHook extends Logging { hooks.toArray } - def getPreExecutionHooks = preExecutionHooks + def getPreExecutionHooks: Array[JDBCDriverPreExecutionHook] = preExecutionHooks } diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/impl/NoLimitExecutionHook.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/impl/NoLimitExecutionHook.scala deleted file mode 100644 index 18f94c0fde..0000000000 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/impl/NoLimitExecutionHook.scala +++ /dev/null @@ -1,40 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.ujes.jdbc.hook.impl - -import org.apache.linkis.ujes.jdbc.UJESSQLDriverMain -import org.apache.linkis.ujes.jdbc.hook.JDBCDriverPreExecutionHook - -class NoLimitExecutionHook extends JDBCDriverPreExecutionHook { - - override def callPreExecutionHook(sql: String): String = { - if (UJESSQLDriverMain.LIMIT_ENABLED.toLowerCase.equals("false")) { - var noLimitSql = "--set ide.engine.no.limit.allow=true\n" + sql - val lowerCaseLimitSql = noLimitSql.toLowerCase() - if (lowerCaseLimitSql.contains("limit ") && lowerCaseLimitSql.contains("tableausql")) { - val lastIndexOfLimit = lowerCaseLimitSql.lastIndexOf("limit ") - noLimitSql = noLimitSql.substring(0, lastIndexOfLimit) - } - noLimitSql - } else { - sql - } - - } - -} diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/impl/TableauPreExecutionHook.scala b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/impl/TableauPreExecutionHook.scala index 63c4f53c9f..ec177b4de7 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/impl/TableauPreExecutionHook.scala +++ b/linkis-computation-governance/linkis-jdbc-driver/src/main/scala/org/apache/linkis/ujes/jdbc/hook/impl/TableauPreExecutionHook.scala @@ -21,7 +21,10 @@ import org.apache.linkis.ujes.jdbc.hook.JDBCDriverPreExecutionHook class TableauPreExecutionHook extends JDBCDriverPreExecutionHook { - override def callPreExecutionHook(sql: String): String = { + override def 
callPreExecutionHook(sql: String, skip: Boolean): String = { + if (skip) { + return sql + } if ( sql.contains("CREATE INDEX") || sql .contains("CREATE TABLE") || sql.contains("INSERT INTO") || sql.contains("DROP TABLE") diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/CreateConnection.java b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/CreateConnection.java index 51d9e9953a..ca2c1e35fd 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/CreateConnection.java +++ b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/CreateConnection.java @@ -27,12 +27,12 @@ public class CreateConnection { - private static UJESSQLConnection conn; + private static LinkisSQLConnection conn; - public static UJESSQLConnection getConnection() throws ClassNotFoundException, SQLException { + public static LinkisSQLConnection getConnection() throws ClassNotFoundException, SQLException { Class.forName("org.apache.linkis.ujes.jdbc.UJESSQLDriver"); conn = - (UJESSQLConnection) + (LinkisSQLConnection) DriverManager.getConnection("jdbc:linkis://hostname:port", "username", "password"); return conn; } diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/JDBCSpiTest.java b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/JDBCSpiTest.java index 427a6b5fc4..3e76bb2856 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/JDBCSpiTest.java +++ b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/JDBCSpiTest.java @@ -29,21 +29,21 @@ * */ public class JDBCSpiTest { - private static UJESSQLConnection conn; + private static LinkisSQLConnection conn; - public static UJESSQLConnection getConnection() throws ClassNotFoundException, 
SQLException { + public static LinkisSQLConnection getConnection() throws ClassNotFoundException, SQLException { Class.forName("org.apache.linkis.ujes.jdbc.UJESSQLDriver"); conn = - (UJESSQLConnection) - DriverManager.getConnection("jdbc:linkis://hostname:port", "root", "123456"); + (LinkisSQLConnection) + DriverManager.getConnection("jdbc:linkis://127.0.0.1:9001", "root", "123456"); return conn; } @Test public void spiTest() { try { - UJESSQLConnection conn = - (UJESSQLConnection) + LinkisSQLConnection conn = + (LinkisSQLConnection) DriverManager.getConnection("jdbc:linkis://hostname:port", "username", "password"); Assertions.assertNotNull(conn); } catch (SQLException e) { diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLStatementTest.java b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/LinkisSQLStatementTest.java similarity index 95% rename from linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLStatementTest.java rename to linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/LinkisSQLStatementTest.java index e674dd5b23..3ebd21ae70 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLStatementTest.java +++ b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/LinkisSQLStatementTest.java @@ -33,9 +33,9 @@ * if you want to test this module,you must rewrite default parameters and SQL we used for local test * */ -public class UJESSQLStatementTest { - private static UJESSQLConnection conn; - private static UJESSQLStatement statement; +public class LinkisSQLStatementTest { + private static LinkisSQLConnection conn; + private static LinkisSQLStatement statement; private static int maxRows; private static int queryTimeout; private static String sql; @@ -48,7 +48,7 @@ public class 
UJESSQLStatementTest { public static void createConnection() { try { conn = CreateConnection.getConnection(); - statement = (UJESSQLStatement) conn.createStatement(); + statement = (LinkisSQLStatement) conn.createStatement(); } catch (ClassNotFoundException e) { e.printStackTrace(); } catch (SQLException e) { diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLDatabaseMetaDataTest.java b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLDatabaseMetaDataTest.java index 0dab63b3ff..100e13f3e4 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLDatabaseMetaDataTest.java +++ b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLDatabaseMetaDataTest.java @@ -34,7 +34,7 @@ import static org.junit.jupiter.api.Assertions.assertTrue; public class UJESSQLDatabaseMetaDataTest { - private static UJESSQLConnection conn; + private static LinkisSQLConnection conn; private static UJESSQLDatabaseMetaData dbmd; @BeforeAll @@ -54,42 +54,43 @@ public static void preWork() { @Test public void supportsMinimumSQLGrammar() { if (dbmd != null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.supportsMinimumSQLGrammar()); + Assertions.assertThrows(LinkisSQLException.class, () -> dbmd.supportsMinimumSQLGrammar()); } } @Test public void getResultSetHoldability() { if (dbmd != null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.getResultSetHoldability()); + Assertions.assertThrows(LinkisSQLException.class, () -> dbmd.getResultSetHoldability()); } } @Test public void getMaxColumnsInGroupBy() { if (dbmd != null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.getMaxColumnsInGroupBy()); + Assertions.assertThrows(LinkisSQLException.class, () -> dbmd.getMaxColumnsInGroupBy()); } } @Test public void supportsSubqueriesInComparisons() { if (dbmd != 
null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.supportsSubqueriesInComparisons()); + Assertions.assertThrows( + LinkisSQLException.class, () -> dbmd.supportsSubqueriesInComparisons()); } } @Test public void getMaxColumnsInSelect() { if (dbmd != null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.getMaxColumnsInSelect()); + Assertions.assertThrows(LinkisSQLException.class, () -> dbmd.getMaxColumnsInSelect()); } } @Test public void nullPlusNonNullIsNull() { if (dbmd != null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.nullPlusNonNullIsNull()); + Assertions.assertThrows(LinkisSQLException.class, () -> dbmd.nullPlusNonNullIsNull()); } } @@ -104,7 +105,7 @@ public void supportsCatalogsInDataManipulation() { public void supportsDataDefinitionAndDataManipulationTransactions() { if (dbmd != null) { Assertions.assertThrows( - UJESSQLException.class, + LinkisSQLException.class, () -> dbmd.supportsDataDefinitionAndDataManipulationTransactions()); } } @@ -112,7 +113,7 @@ public void supportsDataDefinitionAndDataManipulationTransactions() { @Test public void supportsTableCorrelationNames() { if (dbmd != null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.supportsTableCorrelationNames()); + Assertions.assertThrows(LinkisSQLException.class, () -> dbmd.supportsTableCorrelationNames()); } } @@ -133,7 +134,7 @@ public void supportsFullOuterJoins() { @Test public void supportsExpressionsInOrderBy() { if (dbmd != null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.supportsExpressionsInOrderBy()); + Assertions.assertThrows(LinkisSQLException.class, () -> dbmd.supportsExpressionsInOrderBy()); } } @@ -147,14 +148,14 @@ public void allProceduresAreCallable() { @Test public void getMaxTablesInSelect() { if (dbmd != null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.getMaxTablesInSelect()); + Assertions.assertThrows(LinkisSQLException.class, () -> dbmd.getMaxTablesInSelect()); } } 
@Test public void nullsAreSortedAtStart() { if (dbmd != null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.nullsAreSortedAtStart()); + Assertions.assertThrows(LinkisSQLException.class, () -> dbmd.nullsAreSortedAtStart()); } } @@ -168,7 +169,7 @@ public void supportsPositionedUpdate() { @Test public void ownDeletesAreVisible() { if (dbmd != null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.ownDeletesAreVisible(0)); + Assertions.assertThrows(LinkisSQLException.class, () -> dbmd.ownDeletesAreVisible(0)); } } @@ -182,14 +183,14 @@ public void supportsResultSetHoldability() { @Test public void getMaxStatements() { if (dbmd != null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.getMaxStatements()); + Assertions.assertThrows(LinkisSQLException.class, () -> dbmd.getMaxStatements()); } } @Test public void getRowIdLifetime() { if (dbmd != null) { - Assertions.assertThrows(UJESSQLException.class, () -> dbmd.getRowIdLifetime()); + Assertions.assertThrows(LinkisSQLException.class, () -> dbmd.getRowIdLifetime()); } } diff --git a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLPreparedStatementTest.java b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLPreparedStatementTest.java index 54bc3d4538..da431c82e7 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLPreparedStatementTest.java +++ b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLPreparedStatementTest.java @@ -35,8 +35,8 @@ * */ public class UJESSQLPreparedStatementTest { - private static UJESSQLConnection conn; - private UJESSQLPreparedStatement preStatement; + private static LinkisSQLConnection conn; + private LinkisSQLPreparedStatement preStatement; @BeforeAll public static void getConnection() { diff --git 
a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLResultSetTest.java b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLResultSetTest.java index 0a545d8012..a8f0a179d0 100644 --- a/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLResultSetTest.java +++ b/linkis-computation-governance/linkis-jdbc-driver/src/test/java/org/apache/linkis/ujes/jdbc/UJESSQLResultSetTest.java @@ -33,8 +33,8 @@ public class UJESSQLResultSetTest { - private static UJESSQLConnection conn; - private UJESSQLPreparedStatement preStatement; + private static LinkisSQLConnection conn; + private LinkisSQLPreparedStatement preStatement; private UJESSQLResultSet resultSet; private UJESSQLResultSetMetaData metaData; diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/LinkisManagerApplication.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/LinkisManagerApplication.java similarity index 96% rename from linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/LinkisManagerApplication.java rename to linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/LinkisManagerApplication.java index cfbefdbaaa..064d61a6fb 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/LinkisManagerApplication.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/LinkisManagerApplication.java @@ -15,7 +15,7 @@ * limitations under the License. 
*/ -package org.apache.linkis.manager.am; +package org.apache.linkis.manager; import org.apache.linkis.LinkisBaseServerApp; diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/event/message/EngineConnPidCallbackEvent.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/event/message/EngineConnPidCallbackEvent.java new file mode 100644 index 0000000000..1202433fb2 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/event/message/EngineConnPidCallbackEvent.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.manager.am.event.message; + +import org.apache.linkis.governance.common.protocol.task.ResponseEngineConnPid; + +import org.springframework.context.ApplicationEvent; + +public class EngineConnPidCallbackEvent extends ApplicationEvent { + + private ResponseEngineConnPid protocol; + + public EngineConnPidCallbackEvent(ResponseEngineConnPid protocol) { + super(protocol); + this.protocol = protocol; + } + + public ResponseEngineConnPid getProtocol() { + return protocol; + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/exception/AMErrorCode.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/exception/AMErrorCode.java index 3734e3bdf6..c05768739c 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/exception/AMErrorCode.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/exception/AMErrorCode.java @@ -28,7 +28,11 @@ public enum AMErrorCode implements LinkisErrorCode { NOT_EXISTS_ENGINE_CONN(210003, "Not exists EngineConn(不存在的引擎)"), - AM_CONF_ERROR(210004, "AM configuration error(AM配置错误)"); + AM_CONF_ERROR(210004, "AM configuration error(AM配置错误)"), + + ASK_ENGINE_ERROR_RETRY(210005, "Ask engine error, retry(请求引擎失败,重试)"), + + EC_OPERATE_ERROR(210006, "Failed to execute operation(引擎操作失败)"); private final int errorCode; diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/ECResourceInfoRestfulApi.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/ECResourceInfoRestfulApi.java index 03a2b1465d..0896b93388 100644 --- 
a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/ECResourceInfoRestfulApi.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/ECResourceInfoRestfulApi.java @@ -71,9 +71,10 @@ public class ECResourceInfoRestfulApi { public Message getECInfo( HttpServletRequest req, @RequestParam(value = "ticketid") String ticketid) throws AMErrorException { + logger.info("ticked: {} get ec info", ticketid); ECResourceInfoRecord ecResourceInfoRecord = ecResourceInfoService.getECResourceInfoRecord(ticketid); - String userName = ModuleUserUtils.getOperationUser(req, "getECInfo ticketid:") + ticketid; + String userName = ModuleUserUtils.getOperationUser(req, "getECInfo ticketid:" + ticketid); if (null != ecResourceInfoRecord && (userName.equalsIgnoreCase(ecResourceInfoRecord.getCreateUser()) || Configuration.isAdmin(userName))) { @@ -148,7 +149,7 @@ public Message queryEcrHistory( calendar.set(Calendar.SECOND, 0); startDate = calendar.getTime(); } - if (Configuration.isAdmin(username)) { + if (Configuration.isJobHistoryAdmin(username)) { username = null; if (StringUtils.isNotBlank(creatorUser)) { username = creatorUser; @@ -166,7 +167,7 @@ public Message queryEcrHistory( info -> { ECResourceInfoRecordVo ecrHistroryListVo = new ECResourceInfoRecordVo(); BeanUtils.copyProperties(info, ecrHistroryListVo); - ecrHistroryListVo.setEngineType(info.getLabelValue().split(",")[1].split("-")[0]); + ecrHistroryListVo.setEngineType(info.getEngineType()); ecrHistroryListVo.setUsedResource( ECResourceInfoUtils.getStringToMap(info.getUsedResource(), info)); ecrHistroryListVo.setReleasedResource( @@ -188,25 +189,49 @@ public Message queryEcrHistory( @ApiImplicitParam(name = "creators", dataType = "Array", required = true, value = "creators"), @ApiImplicitParam(name = "engineTypes", dataType = "Array", value = "engine type"), @ApiImplicitParam(name 
= "statuss", dataType = "Array", value = "statuss"), + @ApiImplicitParam(name = "queueName", dataType = "String", value = "queueName"), + @ApiImplicitParam(name = "ecInstances", dataType = "Array", value = "ecInstances"), }) @RequestMapping(path = "/ecList", method = RequestMethod.POST) public Message queryEcList(HttpServletRequest req, @RequestBody JsonNode jsonNode) { + String username = ModuleUserUtils.getOperationUser(req, "ecList"); + String token = ModuleUserUtils.getToken(req); + // check special admin token + if (StringUtils.isNotBlank(token)) { + if (!Configuration.isAdminToken(token)) { + logger.warn("Token:{} has no permission to query ecList.", token); + return Message.error("Token:" + token + " has no permission to query ecList."); + } + } else if (!Configuration.isAdmin(username)) { + logger.warn("User:{} has no permission to query ecList.", username); + return Message.error("User:" + username + " has no permission to query ecList."); + } JsonNode creatorsParam = jsonNode.get("creators"); JsonNode engineTypesParam = jsonNode.get("engineTypes"); JsonNode statussParam = jsonNode.get("statuss"); + JsonNode queueNameParam = jsonNode.get("queueName"); + JsonNode ecInstancesParam = jsonNode.get("ecInstances"); - if (creatorsParam == null || creatorsParam.isNull() || creatorsParam.size() == 0) { - return Message.error("creators is null in the parameters of the request(请求参数中【creators】为空)"); - } + // if (creatorsParam == null || creatorsParam.isNull() || creatorsParam.size() == 0) { + // return Message.error("creators is null in the parameters of the + // request(请求参数中【creators】为空)"); + // } List creatorUserList = new ArrayList<>(); - try { - creatorUserList = - JsonUtils.jackson() - .readValue(creatorsParam.toString(), new TypeReference>() {}); - } catch (JsonProcessingException e) { - return Message.error("parameters:creators parsing failed(请求参数【creators】解析失败)"); + if (creatorsParam != null && !creatorsParam.isNull()) { + try { + creatorUserList = + 
JsonUtils.jackson() + .readValue(creatorsParam.toString(), new TypeReference>() {}); + } catch (JsonProcessingException e) { + return Message.error("parameters:creators parsing failed(请求参数【creators】解析失败)"); + } + for (String creatorUser : creatorUserList) { + if (null != creatorUser && !ECResourceInfoUtils.checkNameValid(creatorUser)) { + return Message.error("Invalid creator: " + creatorUser); + } + } } List engineTypeList = new ArrayList<>(); @@ -230,35 +255,37 @@ public Message queryEcList(HttpServletRequest req, @RequestBody JsonNode jsonNod return Message.error("parameters:statuss parsing failed(请求参数【statuss】解析失败)"); } } - - String username = ModuleUserUtils.getOperationUser(req, "ecList"); - - String token = ModuleUserUtils.getToken(req); - // check special admin token - if (StringUtils.isNotBlank(token)) { - if (!Configuration.isAdminToken(token)) { - logger.warn("Token:{} has no permission to query ecList.", token); - return Message.error("Token:" + token + " has no permission to query ecList."); + String queueName = ""; + if (queueNameParam != null && !queueNameParam.isNull()) { + try { + queueName = + JsonUtils.jackson() + .readValue(queueNameParam.toString(), new TypeReference() {}); + } catch (JsonProcessingException e) { + return Message.error("parameters:queueName parsing failed(请求参数【queueName】解析失败)"); } - } else if (!Configuration.isAdmin(username)) { - logger.warn("User:{} has no permission to query ecList.", username); - return Message.error("User:" + username + " has no permission to query ecList."); } - - for (String creatorUser : creatorUserList) { - if (null != creatorUser && !ECResourceInfoUtils.checkNameValid(creatorUser)) { - return Message.error("Invalid creator: " + creatorUser); + List ecInstancesList = new ArrayList<>(); + if (ecInstancesParam != null && !ecInstancesParam.isNull()) { + try { + ecInstancesList = + JsonUtils.jackson() + .readValue(ecInstancesParam.toString(), new TypeReference>() {}); + } catch (JsonProcessingException e) 
{ + return Message.error("parameters:instanceName parsing failed(请求参数【ecInstances】解析失败)"); } } - logger.info( - "request parameters creatorUserList:[{}], engineTypeList:[{}], statusStrList:[{}]", + "request parameters creatorUserList:[{}], engineTypeList:[{}], statusStrList:[{}], queueName:{}, instanceNameList:{}", String.join(",", creatorUserList), String.join(",", engineTypeList), - String.join(",", statusStrList)); + String.join(",", statusStrList), + String.join(",", ecInstancesList), + queueNameParam); List> list = - ecResourceInfoService.getECResourceInfoList(creatorUserList, engineTypeList, statusStrList); + ecResourceInfoService.getECResourceInfoList( + creatorUserList, engineTypeList, statusStrList, queueName, ecInstancesList); return Message.ok().data("ecList", list); } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EMRestfulApi.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EMRestfulApi.java index 4d8bbad27d..8218896c8a 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EMRestfulApi.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EMRestfulApi.java @@ -25,6 +25,7 @@ import org.apache.linkis.manager.am.exception.AMErrorCode; import org.apache.linkis.manager.am.exception.AMErrorException; import org.apache.linkis.manager.am.manager.EngineNodeManager; +import org.apache.linkis.manager.am.service.ECResourceInfoService; import org.apache.linkis.manager.am.service.em.ECMOperateService; import org.apache.linkis.manager.am.service.em.EMInfoService; import org.apache.linkis.manager.am.utils.AMUtils; @@ -33,6 +34,7 @@ import org.apache.linkis.manager.common.entity.metrics.NodeHealthyInfo; import 
org.apache.linkis.manager.common.entity.node.EMNode; import org.apache.linkis.manager.common.entity.node.EngineNode; +import org.apache.linkis.manager.common.entity.persistence.ECResourceInfoRecord; import org.apache.linkis.manager.common.protocol.OperateRequest$; import org.apache.linkis.manager.common.protocol.em.ECMOperateRequest; import org.apache.linkis.manager.common.protocol.em.ECMOperateRequest$; @@ -91,6 +93,7 @@ public class EMRestfulApi { @Autowired private ECMOperateService ecmOperateService; + @Autowired private ECResourceInfoService ecResourceInfoService; private LabelBuilderFactory stdLabelBuilderFactory = LabelBuilderFactoryContext.getLabelBuilderFactory(); @@ -323,7 +326,10 @@ public Message executeECMOperationByEC(HttpServletRequest req, @RequestBody Json return Message.error( "You have no permission to execute ECM Operation by this EngineConn " + serviceInstance); } - return executeECMOperation(engineNode.getEMNode(), new ECMOperateRequest(userName, parameters)); + return executeECMOperation( + engineNode.getEMNode(), + engineNode.getServiceInstance().getInstance(), + new ECMOperateRequest(userName, parameters)); } @ApiOperation( @@ -354,7 +360,7 @@ public Message executeECMOperation(HttpServletRequest req, @RequestBody JsonNode "Fail to process the operation parameters, cased by " + ExceptionUtils.getRootCauseMessage(e)); } - return executeECMOperation(ecmNode, new ECMOperateRequest(userName, parameters)); + return executeECMOperation(ecmNode, "", new ECMOperateRequest(userName, parameters)); } @ApiOperation(value = "openEngineLog", notes = "open Engine log", response = Message.class) @@ -377,9 +383,10 @@ public Message openEngineLog(HttpServletRequest req, @RequestBody JsonNode jsonN String userName = ModuleUserUtils.getOperationUser(req, "openEngineLog"); EMNode ecmNode; Map parameters; + String engineInstance; try { String emInstance = jsonNode.get("emInstance").asText(); - String engineInstance = jsonNode.get("instance").asText(); + 
engineInstance = jsonNode.get("instance").asText(); ServiceInstance serviceInstance = EngineRestfulApi.getServiceInstance(jsonNode); logger.info("User {} try to open engine: {} log.", userName, serviceInstance); ecmNode = @@ -392,7 +399,10 @@ public Message openEngineLog(HttpServletRequest req, @RequestBody JsonNode jsonN jsonNode.get("parameters").toString(), new TypeReference>() {}); String logType = (String) parameters.get("logType"); - if (!logType.equals("stdout") && !logType.equals("stderr")) { + if (!logType.equals("stdout") + && !logType.equals("stderr") + && !logType.equals("gc") + && !logType.equals("yarnApp")) { throw new AMErrorException( AMErrorCode.PARAM_ERROR.getErrorCode(), AMErrorCode.PARAM_ERROR.getErrorDesc()); } @@ -413,16 +423,18 @@ public Message openEngineLog(HttpServletRequest req, @RequestBody JsonNode jsonN logger.error("Failed to open engine log, error:", e); return Message.error(e.getMessage()); } - return executeECMOperation(ecmNode, new ECMOperateRequest(userName, parameters)); + return executeECMOperation( + ecmNode, engineInstance, new ECMOperateRequest(userName, parameters)); } - private Message executeECMOperation(EMNode ecmNode, ECMOperateRequest ecmOperateRequest) { + private Message executeECMOperation( + EMNode ecmNode, String engineInstance, ECMOperateRequest ecmOperateRequest) { String operationName = OperateRequest$.MODULE$.getOperationName(ecmOperateRequest.parameters()); - if (ArrayUtils.contains(adminOperations, operationName) - && Configuration.isNotAdmin(ecmOperateRequest.user())) { + String userName = ecmOperateRequest.user(); + if (ArrayUtils.contains(adminOperations, operationName) && Configuration.isNotAdmin(userName)) { logger.warn( "User {} has no permission to execute {} admin Operation in ECM {}.", - ecmOperateRequest.user(), + userName, operationName, ecmNode.getServiceInstance()); return Message.error( @@ -431,6 +443,33 @@ private Message executeECMOperation(EMNode ecmNode, ECMOperateRequest ecmOperate + " 
admin Operation in ECM " + ecmNode.getServiceInstance()); } + + // fill in logDirSuffix + if (StringUtils.isNotBlank(engineInstance) + && Objects.isNull(ecmOperateRequest.parameters().get("logDirSuffix"))) { + ECResourceInfoRecord ecResourceInfoRecord = + ecResourceInfoService.getECResourceInfoRecordByInstance(engineInstance); + if (Objects.isNull(ecResourceInfoRecord)) { + return Message.error("ECM instance: " + ecmNode.getServiceInstance() + " not exist "); + } + // eg logDirSuffix -> root/20230705/io_file/6d48068a-0e1e-44b5-8eb2-835034db5b30/logs + String logDirSuffix = ecResourceInfoRecord.getLogDirSuffix(); + if (!userName.equals(ecResourceInfoRecord.getCreateUser()) + && Configuration.isNotJobHistoryAdmin(userName)) { + logger.warn( + "User {} has no permission to get log with path: {} in ECM:{}.", + userName, + logDirSuffix, + ecmNode.getServiceInstance()); + return Message.error( + "You have no permission to get log with path:" + + logDirSuffix + + " in ECM:" + + ecmNode.getServiceInstance()); + } + ecmOperateRequest.parameters().put("logDirSuffix", logDirSuffix); + } + ECMOperateResponse engineOperateResponse = ecmOperateService.executeOperation(ecmNode, ecmOperateRequest); diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EngineRestfulApi.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EngineRestfulApi.java index 475c58c9e8..70b74387ca 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EngineRestfulApi.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/restful/EngineRestfulApi.java @@ -23,28 +23,28 @@ import org.apache.linkis.common.utils.ByteTimeUtils; import org.apache.linkis.common.utils.JsonUtils; import 
org.apache.linkis.governance.common.conf.GovernanceCommonConf; +import org.apache.linkis.governance.common.constant.ec.ECConstants; +import org.apache.linkis.governance.common.utils.JobUtils; +import org.apache.linkis.governance.common.utils.LoggerUtils; import org.apache.linkis.manager.am.conf.AMConfiguration; import org.apache.linkis.manager.am.exception.AMErrorCode; import org.apache.linkis.manager.am.exception.AMErrorException; import org.apache.linkis.manager.am.manager.EngineNodeManager; import org.apache.linkis.manager.am.service.ECResourceInfoService; -import org.apache.linkis.manager.am.service.engine.EngineCreateService; -import org.apache.linkis.manager.am.service.engine.EngineInfoService; -import org.apache.linkis.manager.am.service.engine.EngineOperateService; -import org.apache.linkis.manager.am.service.engine.EngineStopService; +import org.apache.linkis.manager.am.service.engine.*; import org.apache.linkis.manager.am.util.ECResourceInfoUtils; import org.apache.linkis.manager.am.utils.AMUtils; import org.apache.linkis.manager.am.vo.AMEngineNodeVo; +import org.apache.linkis.manager.common.constant.AMConstant; import org.apache.linkis.manager.common.entity.enumeration.NodeStatus; import org.apache.linkis.manager.common.entity.node.AMEMNode; +import org.apache.linkis.manager.common.entity.node.EMNode; import org.apache.linkis.manager.common.entity.node.EngineNode; import org.apache.linkis.manager.common.entity.persistence.ECResourceInfoRecord; -import org.apache.linkis.manager.common.protocol.engine.EngineCreateRequest; -import org.apache.linkis.manager.common.protocol.engine.EngineOperateRequest; -import org.apache.linkis.manager.common.protocol.engine.EngineOperateResponse; -import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest; +import org.apache.linkis.manager.common.protocol.engine.*; import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactory; import 
org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext; +import org.apache.linkis.manager.label.constant.LabelKeyConstant; import org.apache.linkis.manager.label.entity.Label; import org.apache.linkis.manager.label.entity.UserModifiable; import org.apache.linkis.manager.label.exception.LabelErrorException; @@ -65,11 +65,10 @@ import java.io.IOException; import java.text.MessageFormat; import java.util.*; +import java.util.concurrent.Callable; import java.util.stream.Collectors; import java.util.stream.Stream; -import scala.annotation.meta.param; - import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.JsonNode; @@ -91,6 +90,7 @@ public class EngineRestfulApi { @Autowired private EngineInfoService engineInfoService; + @Autowired private EngineAskEngineService engineAskService; @Autowired private EngineCreateService engineCreateService; @Autowired private EngineNodeManager engineNodeManager; @@ -103,6 +103,8 @@ public class EngineRestfulApi { @Autowired private ECResourceInfoService ecResourceInfoService; + @Autowired private EngineReuseService engineReuseService; + private final ObjectMapper objectMapper = new ObjectMapper(); private LabelBuilderFactory stdLabelBuilderFactory = @@ -110,6 +112,183 @@ public class EngineRestfulApi { private static final Logger logger = LoggerFactory.getLogger(EngineRestfulApi.class); + @ApiOperation(value = "askEngineConn", response = Message.class) + @ApiOperationSupport(ignoreParameters = {"jsonNode"}) + @RequestMapping(path = "/askEngineConn", method = RequestMethod.POST) + public Message askEngineConn( + HttpServletRequest req, @RequestBody EngineAskRequest engineAskRequest) + throws IOException, InterruptedException { + String userName = ModuleUserUtils.getOperationUser(req, "askEngineConn"); + engineAskRequest.setUser(userName); + long timeout = engineAskRequest.getTimeOut(); + if (timeout <= 0) { + timeout = 
AMConfiguration.ENGINE_CONN_START_REST_MAX_WAIT_TIME().getValue().toLong(); + engineAskRequest.setTimeOut(timeout); + } + Map retEngineNode = new HashMap<>(); + logger.info( + "User {} try to ask an engineConn with maxStartTime {}. EngineAskRequest is {}.", + userName, + ByteTimeUtils.msDurationToString(timeout), + engineAskRequest); + Sender sender = Sender.getSender(Sender.getThisServiceInstance()); + EngineNode engineNode = null; + + // try to reuse ec first + String taskId = JobUtils.getJobIdFromStringMap(engineAskRequest.getProperties()); + LoggerUtils.setJobIdMDC(taskId); + logger.info("received task : {}, engineAskRequest : {}", taskId, engineAskRequest); + if (!engineAskRequest.getLabels().containsKey(LabelKeyConstant.EXECUTE_ONCE_KEY)) { + EngineReuseRequest engineReuseRequest = new EngineReuseRequest(); + engineReuseRequest.setLabels(engineAskRequest.getLabels()); + engineReuseRequest.setTimeOut(engineAskRequest.getTimeOut()); + engineReuseRequest.setUser(engineAskRequest.getUser()); + engineReuseRequest.setProperties(engineAskRequest.getProperties()); + boolean end = false; + EngineNode reuseNode = null; + int count = 0; + int MAX_RETRY = 2; + while (!end) { + try { + reuseNode = engineReuseService.reuseEngine(engineReuseRequest, sender); + end = true; + } catch (LinkisRetryException e) { + logger.error( + "task: {}, user: {} reuse engine failed", taskId, engineReuseRequest.getUser(), e); + Thread.sleep(1000); + end = false; + count += 1; + if (count > MAX_RETRY) { + end = true; + } + } catch (Exception e1) { + logger.info( + "task: {} user: {} reuse engine failed", taskId, engineReuseRequest.getUser(), e1); + end = true; + } + } + if (null != reuseNode) { + logger.info( + "Finished to ask engine for task: {}, user: {} by reuse node {}", + taskId, + engineReuseRequest.getUser(), + reuseNode); + LoggerUtils.removeJobIdMDC(); + engineNode = reuseNode; + } + } + + if (null != engineNode) { + fillResultEngineNode(retEngineNode, engineNode); + return 
Message.ok("reuse engineConn ended.").data("engine", retEngineNode); + } + + String engineAskAsyncId = EngineAskEngineService$.MODULE$.getAsyncId(); + Callable createECTask = + new Callable() { + @Override + public Object call() { + LoggerUtils.setJobIdMDC(taskId); + logger.info( + "Task: {}, start to async({}) createEngine: {}", + taskId, + engineAskAsyncId, + engineAskRequest.getCreateService()); + // remove engineInstance label if exists + engineAskRequest.getLabels().remove("engineInstance"); + EngineCreateRequest engineCreateRequest = new EngineCreateRequest(); + engineCreateRequest.setLabels(engineAskRequest.getLabels()); + engineCreateRequest.setTimeout(engineAskRequest.getTimeOut()); + engineCreateRequest.setUser(engineAskRequest.getUser()); + engineCreateRequest.setProperties(engineAskRequest.getProperties()); + engineCreateRequest.setCreateService(engineAskRequest.getCreateService()); + try { + EngineNode createNode = engineCreateService.createEngine(engineCreateRequest, sender); + long timeout = 0L; + if (engineCreateRequest.getTimeout() <= 0) { + timeout = AMConfiguration.ENGINE_START_MAX_TIME().getValue().toLong(); + } else { + timeout = engineCreateRequest.getTimeout(); + } + // useEngine need to add timeout + EngineNode createEngineNode = engineNodeManager.useEngine(createNode, timeout); + if (null == createEngineNode) { + throw new LinkisRetryException( + AMConstant.EM_ERROR_CODE, + "create engine${createNode.getServiceInstance} success, but to use engine failed"); + } + logger.info( + "Task: $taskId finished to ask engine for user ${engineAskRequest.getUser} by create node $createEngineNode"); + return createEngineNode; + } catch (Exception e) { + logger.error( + "Task: {} failed to ask engine for user {} by create node", taskId, userName, e); + return new LinkisRetryException(AMConstant.EM_ERROR_CODE, e.getMessage()); + } finally { + LoggerUtils.removeJobIdMDC(); + } + } + }; + + try { + Object rs = createECTask.call(); + if (rs instanceof 
LinkisRetryException) { + throw (LinkisRetryException) rs; + } else { + engineNode = (EngineNode) rs; + } + } catch (LinkisRetryException retryException) { + logger.error( + "User {} create engineConn failed get retry exception. can be Retry", + userName, + retryException); + return Message.error( + String.format( + "Create engineConn failed, caused by %s.", + ExceptionUtils.getRootCauseMessage(retryException))) + .data("canRetry", true); + } catch (Exception e) { + LoggerUtils.removeJobIdMDC(); + logger.error("User {} create engineConn failed get retry exception", userName, e); + return Message.error( + String.format( + "Create engineConn failed, caused by %s.", ExceptionUtils.getRootCauseMessage(e))); + } + + LoggerUtils.removeJobIdMDC(); + fillResultEngineNode(retEngineNode, engineNode); + logger.info( + "Finished to create a engineConn for user {}. NodeInfo is {}.", userName, engineNode); + // to transform to a map + return Message.ok("create engineConn ended.").data("engine", retEngineNode); + } + + private void fillNullNode( + Map retEngineNode, EngineAskAsyncResponse askAsyncResponse) { + retEngineNode.put(AMConstant.EC_ASYNC_START_RESULT_KEY, AMConstant.EC_ASYNC_START_RESULT_FAIL); + retEngineNode.put( + AMConstant.EC_ASYNC_START_FAIL_MSG_KEY, + "Got null response for asyId : " + askAsyncResponse.id()); + retEngineNode.put(ECConstants.MANAGER_SERVICE_INSTANCE_KEY(), Sender.getThisServiceInstance()); + } + + private void fillResultEngineNode(Map retEngineNode, EngineNode engineNode) { + retEngineNode.put( + AMConstant.EC_ASYNC_START_RESULT_KEY, AMConstant.EC_ASYNC_START_RESULT_SUCCESS); + retEngineNode.put("serviceInstance", engineNode.getServiceInstance()); + if (null == engineNode.getNodeStatus()) { + engineNode.setNodeStatus(NodeStatus.Starting); + } + retEngineNode.put(ECConstants.NODE_STATUS_KEY(), engineNode.getNodeStatus().toString()); + retEngineNode.put(ECConstants.EC_TICKET_ID_KEY(), engineNode.getTicketId()); + EMNode emNode = 
engineNode.getEMNode(); + if (null != emNode) { + retEngineNode.put( + ECConstants.ECM_SERVICE_INSTANCE_KEY(), engineNode.getEMNode().getServiceInstance()); + } + retEngineNode.put(ECConstants.MANAGER_SERVICE_INSTANCE_KEY(), Sender.getThisServiceInstance()); + } + @ApiOperation(value = "createEngineConn", response = Message.class) @ApiOperationSupport(ignoreParameters = {"jsonNode"}) @RequestMapping(path = "/createEngineConn", method = RequestMethod.POST) @@ -149,13 +328,7 @@ public Message createEngineConn( "Finished to create a engineConn for user {}. NodeInfo is {}.", userName, engineNode); // to transform to a map Map retEngineNode = new HashMap<>(); - retEngineNode.put("serviceInstance", engineNode.getServiceInstance()); - if (null == engineNode.getNodeStatus()) { - engineNode.setNodeStatus(NodeStatus.Starting); - } - retEngineNode.put("nodeStatus", engineNode.getNodeStatus().toString()); - retEngineNode.put("ticketId", engineNode.getTicketId()); - retEngineNode.put("ecmServiceInstance", engineNode.getEMNode().getServiceInstance()); + fillResultEngineNode(retEngineNode, engineNode); return Message.ok("create engineConn succeed.").data("engine", retEngineNode); } @@ -173,6 +346,7 @@ public Message getEngineConn(HttpServletRequest req, @RequestBody JsonNode jsonN } catch (Exception e) { logger.info("Instances {} does not exist", serviceInstance.getInstance()); } + String ecMetrics = null; if (null == engineNode) { ECResourceInfoRecord ecInfo = null; if (null != ticketIdNode) { @@ -189,12 +363,19 @@ public Message getEngineConn(HttpServletRequest req, @RequestBody JsonNode jsonN if (null == ecInfo) { return Message.error("Instance does not exist " + serviceInstance); } + if (null == ecMetrics) { + ecMetrics = ecInfo.getMetrics(); + } engineNode = ECResourceInfoUtils.convertECInfoTOECNode(ecInfo); + } else { + ecMetrics = engineNode.getEcMetrics(); } if (!userName.equals(engineNode.getOwner()) && Configuration.isNotAdmin(userName)) { return Message.error("You have 
no permission to access EngineConn " + serviceInstance); } - return Message.ok().data("engine", engineNode); + Message result = Message.ok().data("engine", engineNode); + result.data(AMConstant.EC_METRICS_KEY, ecMetrics); + return result; } @ApiOperation(value = "kill egineconn", notes = "kill engineconn", response = Message.class) @@ -484,6 +665,11 @@ public Message executeEngineConnOperation(HttpServletRequest req, @RequestBody J ServiceInstance serviceInstance = getServiceInstance(jsonNode); logger.info("User {} try to execute Engine Operation {}.", userName, serviceInstance); EngineNode engineNode = engineNodeManager.getEngineNode(serviceInstance); + if (null == engineNode) { + return Message.ok() + .data("isError", true) + .data("errorMsg", "Ec : " + serviceInstance.toString() + " not found."); + } if (!userName.equals(engineNode.getOwner()) && Configuration.isNotAdmin(userName)) { return Message.error("You have no permission to execute Engine Operation " + serviceInstance); } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/ECResourceInfoService.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/ECResourceInfoService.java index 6aabe19198..fd9b384359 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/ECResourceInfoService.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/ECResourceInfoService.java @@ -40,8 +40,14 @@ List getECResourceInfoRecordList( * @param creatorUserList engineconn creator list * @param engineTypeList engineconn type list * @param statusStrList engineconn status string list + * @param queueName + * @param ecInstancesList * @return */ List> getECResourceInfoList( - List creatorUserList, List engineTypeList, List 
statusStrList); + List creatorUserList, + List engineTypeList, + List statusStrList, + String queueName, + List ecInstancesList); } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnPidCallbackService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/EngineConnPidCallbackService.java similarity index 82% rename from linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnPidCallbackService.scala rename to linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/EngineConnPidCallbackService.java index 8e83394738..194eea590d 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnPidCallbackService.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/EngineConnPidCallbackService.java @@ -15,12 +15,11 @@ * limitations under the License. 
*/ -package org.apache.linkis.ecm.server.service +package org.apache.linkis.manager.am.service; -import org.apache.linkis.governance.common.protocol.task.ResponseEngineConnPid - -trait EngineConnPidCallbackService { - - def dealPid(protocol: ResponseEngineConnPid): Unit +import org.apache.linkis.governance.common.protocol.task.ResponseEngineConnPid; +@FunctionalInterface +public interface EngineConnPidCallbackService { + void dealPid(ResponseEngineConnPid protocol); } diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnStatusCallbackService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/EngineConnStatusCallbackService.java similarity index 79% rename from linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnStatusCallbackService.scala rename to linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/EngineConnStatusCallbackService.java index 12ccc088be..477d49aa10 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/EngineConnStatusCallbackService.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/EngineConnStatusCallbackService.java @@ -15,12 +15,11 @@ * limitations under the License. 
*/ -package org.apache.linkis.ecm.server.service +package org.apache.linkis.manager.am.service; -import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallback +import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallbackToAM; -trait EngineConnStatusCallbackService { - - def dealEngineConnStatusCallback(protocol: EngineConnStatusCallback): Unit +public interface EngineConnStatusCallbackService { + void dealEngineConnStatusCallbackToAM(EngineConnStatusCallbackToAM engineConnStatusCallbackToAM); } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/DefaultEngineConnPidCallbackService.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/DefaultEngineConnPidCallbackService.java new file mode 100644 index 0000000000..3d199fe29c --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/DefaultEngineConnPidCallbackService.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.manager.am.service.impl; + +import org.apache.linkis.governance.common.protocol.task.ResponseEngineConnPid; +import org.apache.linkis.manager.am.manager.DefaultEngineNodeManager; +import org.apache.linkis.manager.am.service.EngineConnPidCallbackService; +import org.apache.linkis.manager.common.entity.node.EngineNode; +import org.apache.linkis.rpc.message.annotation.Receiver; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Service +public class DefaultEngineConnPidCallbackService implements EngineConnPidCallbackService { + private static final Logger logger = + LoggerFactory.getLogger(DefaultEngineConnPidCallbackService.class); + + @Autowired private DefaultEngineNodeManager defaultEngineNodeManager; + + @Receiver + @Override + public void dealPid(ResponseEngineConnPid protocol) { + // set pid + logger.info( + "DefaultEngineConnPidCallbackService dealPid serviceInstance: [{}] pid: [{}]" + + " ticketId: [{}]", + protocol.serviceInstance(), + protocol.pid(), + protocol.ticketId()); + + EngineNode engineNode = defaultEngineNodeManager.getEngineNode(protocol.serviceInstance()); + if (engineNode == null) { + logger.error( + "DefaultEngineConnPidCallbackService dealPid failed, engineNode is null, serviceInstance:{}", + protocol.serviceInstance()); + return; + } + + engineNode.setIdentifier(protocol.pid()); + defaultEngineNodeManager.updateEngine(engineNode); + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/DefaultEngineConnStatusCallbackService.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/DefaultEngineConnStatusCallbackService.java new file mode 100644 index 0000000000..7efeabd3e8 --- /dev/null +++ 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/DefaultEngineConnStatusCallbackService.java @@ -0,0 +1,133 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.am.service.impl; + +import org.apache.linkis.manager.am.conf.AMConfiguration; +import org.apache.linkis.manager.am.service.EngineConnStatusCallbackService; +import org.apache.linkis.manager.am.service.engine.EngineStopService; +import org.apache.linkis.manager.common.constant.AMConstant; +import org.apache.linkis.manager.common.entity.enumeration.NodeStatus; +import org.apache.linkis.manager.common.entity.metrics.AMNodeMetrics; +import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallback; +import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallbackToAM; +import org.apache.linkis.manager.persistence.NodeMetricManagerPersistence; +import org.apache.linkis.manager.service.common.metrics.MetricsConverter; +import org.apache.linkis.rpc.message.annotation.Receiver; +import org.apache.linkis.server.BDPJettyServerHelper; + +import org.apache.commons.lang3.StringUtils; + +import 
org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import java.io.UnsupportedEncodingException; +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; + +import com.fasterxml.jackson.core.JsonProcessingException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Service +public class DefaultEngineConnStatusCallbackService implements EngineConnStatusCallbackService { + + private static final Logger logger = + LoggerFactory.getLogger(DefaultEngineConnStatusCallbackService.class); + + @Autowired private NodeMetricManagerPersistence nodeMetricManagerPersistence; + + @Autowired private MetricsConverter metricsConverter; + + @Autowired private EngineStopService engineStopService; + + private static final String[] canRetryLogs = + AMConfiguration.AM_CAN_RETRY_LOGS().getValue().split(";"); + + @Receiver + public void dealEngineConnStatusCallback(EngineConnStatusCallback protocol) { + logger.info( + "EngineConnStatusCallbackServiceImpl handle engineConnStatus callback serviceInstance: [{}] status: [{}]", + protocol.serviceInstance(), + protocol.status()); + if (!NodeStatus.isAvailable(protocol.status())) { + dealEngineConnStatusCallbackToAM( + new EngineConnStatusCallbackToAM( + protocol.serviceInstance(), protocol.status(), protocol.initErrorMsg(), false)); + } + } + + @Receiver + public void dealEngineConnStatusCallbackToAM( + EngineConnStatusCallbackToAM engineConnStatusCallbackToAM) { + if (engineConnStatusCallbackToAM.serviceInstance() == null) { + logger.warn("call back service instance is null"); + } + logger.info( + "EngineConnStatusCallbackServiceImpl start to deal engineConnStatusCallbackToAM {}", + engineConnStatusCallbackToAM); + + AMNodeMetrics nodeMetrics = new AMNodeMetrics(); + Map heartBeatMsg = new HashMap<>(); + int initErrorMsgMaxByteNum = 60000; + + String initErrorMsg = engineConnStatusCallbackToAM.initErrorMsg(); + try { + if (StringUtils.isNotBlank(initErrorMsg) 
+ && initErrorMsg.getBytes("utf-8").length >= initErrorMsgMaxByteNum) { + initErrorMsg = initErrorMsg.substring(0, initErrorMsgMaxByteNum); + } + } catch (UnsupportedEncodingException e) { + logger.warn("dealEngineConnStatusCallbackToAM getBytes failed", e); + } + heartBeatMsg.put(AMConstant.START_REASON, initErrorMsg); + + if (engineConnStatusCallbackToAM.canRetry()) { + heartBeatMsg.put(AMConstant.EC_CAN_RETRY, engineConnStatusCallbackToAM.canRetry()); + } else if (matchRetryLog(engineConnStatusCallbackToAM.initErrorMsg())) { + logger.info("match canRetry log {}", engineConnStatusCallbackToAM.serviceInstance()); + heartBeatMsg.put(AMConstant.EC_CAN_RETRY, engineConnStatusCallbackToAM.canRetry()); + } + + try { + nodeMetrics.setHeartBeatMsg( + BDPJettyServerHelper.jacksonJson().writeValueAsString(heartBeatMsg)); + } catch (JsonProcessingException e) { + logger.warn("dealEngineConnStatusCallbackToAM writeValueAsString failed", e); + } + nodeMetrics.setServiceInstance(engineConnStatusCallbackToAM.serviceInstance()); + nodeMetrics.setStatus(metricsConverter.convertStatus(engineConnStatusCallbackToAM.status())); + + nodeMetricManagerPersistence.addOrupdateNodeMetrics(nodeMetrics); + logger.info("Finished to deal engineConnStatusCallbackToAM {}", engineConnStatusCallbackToAM); + } + + private boolean matchRetryLog(String errorMsg) { + boolean flag = false; + if (StringUtils.isNotBlank(errorMsg)) { + String errorMsgLowCase = errorMsg.toLowerCase(Locale.getDefault()); + for (String canRetry : canRetryLogs) { + if (errorMsgLowCase.contains(canRetry)) { + logger.info("match engineConn log fatal logs, is {}", canRetry); + flag = true; + } + } + } + return flag; + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/ECResourceInfoServiceImpl.java 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/ECResourceInfoServiceImpl.java index e8988fee9b..fc07e603ad 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/ECResourceInfoServiceImpl.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/service/impl/ECResourceInfoServiceImpl.java @@ -28,12 +28,15 @@ import org.apache.linkis.manager.dao.NodeManagerMapper; import org.apache.linkis.manager.label.service.NodeLabelService; import org.apache.linkis.manager.persistence.LabelManagerPersistence; +import org.apache.linkis.server.BDPJettyServerHelper; +import org.apache.commons.collections.MapUtils; import org.apache.commons.lang3.StringUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; +import java.math.BigDecimal; import java.text.SimpleDateFormat; import java.util.*; import java.util.stream.Collectors; @@ -101,7 +104,11 @@ public List getECResourceInfoRecordList( @Override public List> getECResourceInfoList( - List creatorUserList, List engineTypeList, List statusStrList) { + List creatorUserList, + List engineTypeList, + List statusStrList, + String queueName, + List ecInstancesList) { List> resultList = new ArrayList<>(); @@ -113,7 +120,7 @@ public List> getECResourceInfoList( // get engine conn info list filter by creator user list /instance status list List ecNodesInfo = - nodeManagerMapper.getEMNodeInfoList(creatorUserList, statusIntList); + nodeManagerMapper.getEMNodeInfoList(creatorUserList, statusIntList, ecInstancesList); // map k:v---> instanceName:PersistencerEcNodeInfo Map persistencerEcNodeInfoMap = @@ -143,8 +150,8 @@ public List> getECResourceInfoList( json.writeValueAsString(ecNodeinfo), new TypeReference>() {}); - Integer intStatus = 
ecNodeinfo.getInstanceStatus(); - item.put("instanceStatus", NodeStatus.values()[intStatus].name()); + Integer instanceStatus = ecNodeinfo.getInstanceStatus(); + item.put("instanceStatus", NodeStatus.values()[instanceStatus].name()); String usedResourceStr = latestRecord.getUsedResource(); /* @@ -152,12 +159,37 @@ public List> getECResourceInfoList( -> {"driver":{"instance":1,"memory":"2.0 GB","cpu":1} } */ - + long lastUnlockTimestamp = 0L; + if (NodeStatus.values()[instanceStatus].name().equals(NodeStatus.Unlock.name())) { + String heartbeatMsg = ecNodeinfo.getHeartbeatMsg(); + Map heartbeatMap = new HashMap<>(); + if (StringUtils.isNotBlank(heartbeatMsg)) { + heartbeatMap = + BDPJettyServerHelper.gson() + .fromJson(heartbeatMsg, new HashMap<>().getClass()); + } + Object lastUnlockTimestampObject = + heartbeatMap.getOrDefault("lastUnlockTimestamp", 0); + BigDecimal lastUnlockTimestampBigDecimal = + new BigDecimal(String.valueOf(lastUnlockTimestampObject)); + lastUnlockTimestamp = lastUnlockTimestampBigDecimal.longValue(); + } + item.put("lastUnlockTimestamp", lastUnlockTimestamp); item.put("useResource", ECResourceInfoUtils.getStringToMap(usedResourceStr)); item.put("ecmInstance", latestRecord.getEcmInstance()); - String engineType = latestRecord.getLabelValue().split(",")[1].split("-")[0]; + String engineType = latestRecord.getEngineType(); item.put("engineType", engineType); - resultList.add(item); + if (StringUtils.isNotBlank(queueName)) { + Map usedResourceMap = + ECResourceInfoUtils.getStringToMap(usedResourceStr); + Map yarn = MapUtils.getMap(usedResourceMap, "yarn", new HashMap()); + String queueNameStr = String.valueOf(yarn.getOrDefault("queueName", "")); + if (StringUtils.isNotBlank(queueNameStr) && queueName.equals(queueNameStr)) { + resultList.add(item); + } + } else { + resultList.add(item); + } } catch (JsonProcessingException e) { logger.error("Fail to process the ec node info: [{}]", ecNodeinfo, e); } diff --git 
a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/util/ECResourceInfoUtils.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/util/ECResourceInfoUtils.java index ab926df64e..85c7470ce5 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/util/ECResourceInfoUtils.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/org/apache/linkis/manager/am/util/ECResourceInfoUtils.java @@ -61,7 +61,7 @@ public static ResourceVo getStringToMap(String str, ECResourceInfoRecord info) { Map divermap = MapUtils.getMap(map, "driver"); resourceVo.setInstance(((Double) divermap.get("instance")).intValue()); resourceVo.setCores(((Double) divermap.get("cpu")).intValue()); - String memoryStr = String.valueOf(map.getOrDefault("memory", "0k")); + String memoryStr = String.valueOf(divermap.getOrDefault("memory", "0k")); long memorylong = 0; if (!getScientific(memoryStr)) { memorylong = ByteTimeUtils.byteStringAsBytes(memoryStr); @@ -125,6 +125,7 @@ public static AMEngineNode convertECInfoTOECNode(ECResourceInfoRecord ecInfo) { engineNode.setTicketId(ecInfo.getTicketId()); engineNode.setStartTime(ecInfo.getCreateTime()); engineNode.setUpdateTime(ecInfo.getReleaseTime()); + engineNode.setEcMetrics(ecInfo.getMetrics()); return engineNode; } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/conf/EngineConnPluginConfiguration.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/conf/EngineConnPluginConfiguration.scala index de1add173a..9a16d9b9e6 100644 --- 
a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/conf/EngineConnPluginConfiguration.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/conf/EngineConnPluginConfiguration.scala @@ -35,4 +35,8 @@ object EngineConnPluginConfiguration { val ENABLED_BML_UPLOAD_FAILED_EXIT: CommonVars[Boolean] = CommonVars("wds.linkis.engineconn.bml.upload.failed.enable", true) + // for third party eg appconn/datax, if all update, can set to false then to remove + val EC_BML_VERSION_MAY_WITH_PREFIX_V: CommonVars[Boolean] = + CommonVars("linkis.engineconn.bml.version.may.with.prefix", true) + } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/localize/AbstractEngineConnBmlResourceGenerator.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/localize/AbstractEngineConnBmlResourceGenerator.scala index fce3508c31..bba2f5d93d 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/localize/AbstractEngineConnBmlResourceGenerator.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/localize/AbstractEngineConnBmlResourceGenerator.scala @@ -17,6 +17,7 @@ package org.apache.linkis.engineplugin.server.localize +import org.apache.linkis.engineplugin.server.conf.EngineConnPluginConfiguration import org.apache.linkis.engineplugin.server.conf.EngineConnPluginConfiguration.ENGINE_CONN_HOME import org.apache.linkis.engineplugin.server.localize.EngineConnBmlResourceGenerator.NO_VERSION_MARK import org.apache.linkis.manager.engineplugin.common.exception.EngineConnPluginErrorException @@ -53,13 +54,38 @@ abstract 
class AbstractEngineConnBmlResourceGenerator extends EngineConnBmlResou val engineConnPackageHome = Paths.get(engineConnDistHome, version).toFile.getPath logger.info("getEngineConnDistHome, engineConnPackageHome path:" + engineConnPackageHome) val engineConnPackageHomeFile = new File(engineConnPackageHome) + + // 兼容老版本 if (!engineConnPackageHomeFile.exists()) { - throw new EngineConnPluginErrorException( - ENGINE_VERSION_NOT_FOUND.getErrorCode, - MessageFormat.format(ENGINE_VERSION_NOT_FOUND.getErrorDesc, version, engineConnType) - ) + if ( + !version.startsWith( + "v" + ) && EngineConnPluginConfiguration.EC_BML_VERSION_MAY_WITH_PREFIX_V.getValue + ) { + val versionOld = "v" + version + val engineConnPackageHomeOld = Paths.get(engineConnDistHome, versionOld).toFile.getPath + logger.info( + "try to getEngineConnDistHome with prefix v, engineConnPackageHome path:" + engineConnPackageHomeOld + ) + val engineConnPackageHomeFileOld = new File(engineConnPackageHomeOld) + if (!engineConnPackageHomeFileOld.exists()) { + throw new EngineConnPluginErrorException( + ENGINE_VERSION_NOT_FOUND.getErrorCode, + MessageFormat.format(ENGINE_VERSION_NOT_FOUND.getErrorDesc, version, engineConnType) + ) + } else { + engineConnPackageHomeOld + } + } else { + throw new EngineConnPluginErrorException( + ENGINE_VERSION_NOT_FOUND.getErrorCode, + MessageFormat.format(ENGINE_VERSION_NOT_FOUND.getErrorDesc, version, engineConnType) + ) + } + } else { + engineConnPackageHome } - engineConnPackageHome + } private def checkEngineConnDistHome(engineConnPackageHomePath: String): Unit = { @@ -71,7 +97,7 @@ abstract class AbstractEngineConnBmlResourceGenerator extends EngineConnBmlResou if (!engineConnPackageHome.exists()) { throw new EngineConnPluginErrorException( CANNOT_HOME_PATH_DIST.getErrorCode, - CANNOT_HOME_PATH_DIST.getErrorDesc + MessageFormat.format(CANNOT_HOME_PATH_DIST.getErrorDesc, engineConnPackageHome.getPath) ) } } diff --git 
a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/service/DefaultEngineConnResourceService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/service/DefaultEngineConnResourceService.scala index 3ddfdc7bbe..55f3b2568a 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/service/DefaultEngineConnResourceService.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/engineplugin/server/service/DefaultEngineConnResourceService.scala @@ -201,8 +201,6 @@ class DefaultEngineConnResourceService extends EngineConnResourceService with Lo engineConnBmlResource.setFileName(localizeResource.fileName) engineConnBmlResource.setFileSize(localizeResource.fileSize) engineConnBmlResource.setLastModified(localizeResource.lastModified) - if (version.startsWith("v")) engineConnBmlResource.setVersion(version.substring(1)) - else engineConnBmlResource.setVersion(version) engineConnBmlResource.setVersion(version) engineConnBmlResourceDao.save(engineConnBmlResource) } else { @@ -241,9 +239,18 @@ class DefaultEngineConnResourceService extends EngineConnResourceService with Lo ): EngineConnResource = { val engineConnType = engineConnBMLResourceRequest.getEngineConnType val version = engineConnBMLResourceRequest.getVersion - val engineConnBmlResources = asScalaBufferConverter( + var engineConnBmlResources = asScalaBufferConverter( engineConnBmlResourceDao.getAllEngineConnBmlResource(engineConnType, version) ) + if ( + engineConnBmlResources.asScala.size == 0 && EngineConnPluginConfiguration.EC_BML_VERSION_MAY_WITH_PREFIX_V.getValue + ) { + logger.info("Try to get engine conn bml resource with prefex v") + engineConnBmlResources = asScalaBufferConverter( + 
engineConnBmlResourceDao.getAllEngineConnBmlResource(engineConnType, "v" + version) + ) + } + val confBmlResourceMap = engineConnBmlResources.asScala .find(_.getFileName == LaunchConstants.ENGINE_CONN_CONF_DIR_NAME + ".zip") .map(parseToBmlResource) diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/JobSubType.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/hook/AskEngineConnHook.scala similarity index 84% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/JobSubType.java rename to linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/hook/AskEngineConnHook.scala index c2c8539e4b..7240288698 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/JobSubType.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/hook/AskEngineConnHook.scala @@ -15,9 +15,11 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.job; +package org.apache.linkis.manager.am.hook -public interface JobSubType { +trait AskEngineConnHook { + + @throws(classOf[Exception]) + def doHook(ctx: AskEngineConnHookContext): Unit - String getName(); } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisJob.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/hook/AskEngineConnHookContext.scala similarity index 72% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisJob.java rename to linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/hook/AskEngineConnHookContext.scala index a32521c20e..d5fcdb9686 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/LinkisJob.java +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/hook/AskEngineConnHookContext.scala @@ -15,12 +15,12 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.application.interactor.job; +package org.apache.linkis.manager.am.hook -import org.apache.linkis.cli.application.interactor.job.data.LinkisJobData; -import org.apache.linkis.cli.core.interactor.job.AbstractJob; +import org.apache.linkis.manager.common.protocol.engine.EngineAskRequest +import org.apache.linkis.rpc.Sender -public abstract class LinkisJob extends AbstractJob { - @Override - public abstract LinkisJobData getJobData(); +class AskEngineConnHookContext(request: EngineAskRequest, sender: Sender) { + def getRequest(): EngineAskRequest = request + def getSender(): Sender = sender } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/manager/DefaultEngineNodeManager.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/manager/DefaultEngineNodeManager.scala index 86cf986d29..68e7d4cbd9 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/manager/DefaultEngineNodeManager.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/manager/DefaultEngineNodeManager.scala @@ -25,7 +25,12 @@ import org.apache.linkis.manager.am.exception.{AMErrorCode, AMErrorException} import org.apache.linkis.manager.am.locker.EngineNodeLocker import org.apache.linkis.manager.common.constant.AMConstant import org.apache.linkis.manager.common.entity.enumeration.NodeStatus -import org.apache.linkis.manager.common.entity.node.{AMEngineNode, EngineNode, ScoreServiceInstance} +import org.apache.linkis.manager.common.entity.node.{ + AMEngineNode, + EngineNode, + RMNode, + ScoreServiceInstance +} import org.apache.linkis.manager.common.entity.persistence.PersistenceLabel import org.apache.linkis.manager.common.protocol.engine.{ EngineOperateRequest, @@ -42,6 +47,8 @@ 
import org.apache.linkis.manager.rm.service.ResourceManager import org.apache.linkis.manager.service.common.metrics.MetricsConverter import org.apache.linkis.manager.service.common.pointer.NodePointerBuilder +import org.apache.commons.lang3.StringUtils + import org.springframework.beans.factory.annotation.Autowired import org.springframework.stereotype.Service @@ -49,6 +56,7 @@ import java.lang.reflect.UndeclaredThrowableException import java.util import scala.collection.JavaConverters._ +import scala.tools.scalap.scalax.util.StringUtil @Service class DefaultEngineNodeManager extends EngineNodeManager with Logging { @@ -105,7 +113,6 @@ class DefaultEngineNodeManager extends EngineNodeManager with Logging { val heartMsg = engine.getNodeHeartbeatMsg() engineNode.setNodeHealthyInfo(heartMsg.getHealthyInfo) engineNode.setNodeOverLoadInfo(heartMsg.getOverLoadInfo) - engineNode.setNodeResource(heartMsg.getNodeResource) engineNode.setNodeStatus(heartMsg.getStatus) engineNode } @@ -125,7 +132,9 @@ class DefaultEngineNodeManager extends EngineNodeManager with Logging { toState: NodeStatus ): Unit = {} - override def updateEngine(engineNode: EngineNode): Unit = {} + override def updateEngine(engineNode: EngineNode): Unit = { + nodeManagerPersistence.updateNodeInstance(engineNode) + } override def switchEngine(engineNode: EngineNode): EngineNode = { null @@ -133,7 +142,7 @@ class DefaultEngineNodeManager extends EngineNodeManager with Logging { override def reuseEngine(engineNode: EngineNode): EngineNode = { val node = getEngineNodeInfo(engineNode) - if (!NodeStatus.isAvailable(node.getNodeStatus)) { + if (node == null || !NodeStatus.isAvailable(node.getNodeStatus)) { return null } if (!NodeStatus.isLocked(node.getNodeStatus)) { @@ -164,7 +173,7 @@ class DefaultEngineNodeManager extends EngineNodeManager with Logging { retryHandler.addRetryException(classOf[UndeclaredThrowableException]) val node = retryHandler.retry[EngineNode](getEngineNodeInfo(engineNode), 
"getEngineNodeInfo") // val node = getEngineNodeInfo(engineNode) - if (!NodeStatus.isAvailable(node.getNodeStatus)) { + if (node == null || !NodeStatus.isAvailable(node.getNodeStatus)) { return null } if (!NodeStatus.isLocked(node.getNodeStatus)) { @@ -293,13 +302,16 @@ class DefaultEngineNodeManager extends EngineNodeManager with Logging { override def getEngineNodeInfo(serviceInstance: ServiceInstance): EngineNode = { val engineNode = getEngineNode(serviceInstance) if (engineNode != null) { + val nodeMetric = nodeMetricManagerPersistence.getNodeMetrics(engineNode) if (engineNode.getNodeStatus == null) { - val nodeMetric = nodeMetricManagerPersistence.getNodeMetrics(engineNode) engineNode.setNodeStatus( if (Option(nodeMetric).isDefined) NodeStatus.values()(nodeMetric.getStatus) else NodeStatus.Starting ) } + if (null != nodeMetric && StringUtils.isNotBlank(nodeMetric.getHeartBeatMsg)) { + engineNode.setEcMetrics(nodeMetric.getHeartBeatMsg) + } return engineNode } throw new AMErrorException( diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/pointer/DefaultEMNodPointer.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/pointer/DefaultEMNodPointer.scala index 5bd74edcde..8fc82f6c4b 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/pointer/DefaultEMNodPointer.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/pointer/DefaultEMNodPointer.scala @@ -17,6 +17,7 @@ package org.apache.linkis.manager.am.pointer +import org.apache.linkis.common.exception.LinkisRetryException import org.apache.linkis.common.utils.Utils import org.apache.linkis.manager.am.exception.AMErrorException import org.apache.linkis.manager.am.utils.AMUtils @@ -38,16 +39,17 @@ class 
DefaultEMNodPointer(val node: Node) extends AbstractNodePointer with EMNod override def createEngine(engineConnLaunchRequest: EngineConnLaunchRequest): EngineNode = { logger.info(s"Start to createEngine ask em ${getNode().getServiceInstance}") - getSender.ask(engineConnLaunchRequest) match { + val ec = getSender.ask(engineConnLaunchRequest) + ec match { case engineNode: EngineNode => logger.info( s"Succeed to createEngine ask em ${getNode().getServiceInstance}, engineNode $engineNode " ) engineNode case _ => - throw new AMErrorException( + throw new LinkisRetryException( AMConstant.ENGINE_ERROR_CODE, - s"Failed to createEngine ask em ${getNode().getServiceInstance}" + s"Failed to createEngine ask em ${getNode().getServiceInstance}, for return ${ec}" ) } } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/pointer/DefaultEngineNodPointer.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/pointer/DefaultEngineNodPointer.scala index f1bcf8eb9a..eda42b6dbf 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/pointer/DefaultEngineNodPointer.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/pointer/DefaultEngineNodPointer.scala @@ -59,7 +59,8 @@ class DefaultEngineNodPointer(val node: Node) extends AbstractNodePointer with E override def executeOperation( engineOperateRequest: EngineOperateRequest ): EngineOperateResponse = { - getSender.ask(engineOperateRequest) match { + val rs = getSender.ask(engineOperateRequest) + rs match { case response: EngineOperateResponse => response case _ => throw new WarnException(-1, "Illegal response of operation.") } diff --git 
a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/DefaultEMEngineService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/DefaultEMEngineService.scala index 918faf912f..f4af1c5550 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/DefaultEMEngineService.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/DefaultEMEngineService.scala @@ -19,21 +19,35 @@ package org.apache.linkis.manager.am.service.em import org.apache.linkis.common.utils.Logging import org.apache.linkis.engineplugin.server.service.EngineConnLaunchService +import org.apache.linkis.governance.common.utils.ECPathUtils import org.apache.linkis.manager.am.exception.AMErrorException import org.apache.linkis.manager.am.manager.{EMNodeManager, EngineNodeManager} +import org.apache.linkis.manager.am.service.ECResourceInfoService import org.apache.linkis.manager.am.service.EMEngineService import org.apache.linkis.manager.common.constant.AMConstant import org.apache.linkis.manager.common.entity.node._ +import org.apache.linkis.manager.common.entity.persistence.{ + ECResourceInfoRecord, + PersistenceResource +} import org.apache.linkis.manager.common.protocol.em._ import org.apache.linkis.manager.common.protocol.engine.EngineStopRequest import org.apache.linkis.manager.common.utils.ManagerUtils import org.apache.linkis.manager.engineplugin.common.launch.entity.EngineConnBuildRequest import org.apache.linkis.manager.label.entity.{EngineNodeLabel, Label} import org.apache.linkis.manager.label.entity.em.EMInstanceLabel +import org.apache.linkis.manager.label.entity.engine.{ + EngineInstanceLabel, + EngineTypeLabel, + UserCreatorLabel +} import 
org.apache.linkis.manager.label.service.NodeLabelService +import org.apache.linkis.manager.rm.domain.RMLabelContainer +import org.apache.linkis.manager.rm.service.LabelResourceService import org.apache.linkis.manager.service.common.label.LabelFilter import org.apache.commons.collections.MapUtils +import org.apache.commons.lang3.StringUtils import org.springframework.beans.factory.annotation.Autowired import org.springframework.stereotype.Service @@ -57,9 +71,15 @@ class DefaultEMEngineService extends EMEngineService with Logging { @Autowired private var engineConnLaunchService: EngineConnLaunchService = _ + @Autowired + private var ecResourceInfoService: ECResourceInfoService = _ + @Autowired private var labelFilter: LabelFilter = _ + @Autowired + private var labelResourceService: LabelResourceService = _ + override def listEngines(getEMEnginesRequest: GetEMEnginesRequest): util.List[EngineNode] = { val emNode = new AMEMNode() emNode.setServiceInstance(getEMEnginesRequest.getEm) @@ -95,8 +115,54 @@ class DefaultEMEngineService extends EMEngineService with Logging { ) val engineStopRequest = new EngineStopRequest engineStopRequest.setServiceInstance(engineNode.getServiceInstance) + engineStopRequest.setIdentifierType(engineNode.getMark) + engineStopRequest.setIdentifier(engineNode.getIdentifier) + val ecResourceInfo: ECResourceInfoRecord = + if (StringUtils.isNotBlank(engineNode.getTicketId)) { + ecResourceInfoService.getECResourceInfoRecord(engineNode.getTicketId) + } else { + ecResourceInfoService.getECResourceInfoRecordByInstance( + engineNode.getServiceInstance.getInstance + ) + } + + if (ecResourceInfo != null) { + engineStopRequest.setEngineType(ecResourceInfo.getEngineType()) + engineStopRequest.setLogDirSuffix(ecResourceInfo.getLogDirSuffix) + } else { + if (engineNode.getLabels.isEmpty) { + // node labels is empty, engine already been stopped + logger.info( + s"DefaultEMEngineService stopEngine node labels is empty, engine: 
${engineStopRequest.getServiceInstance} have already been stopped." + ) + return + } + + val rMLabelContainer: RMLabelContainer = + labelResourceService.enrichLabels(engineNode.getLabels) + + val persistenceResource: PersistenceResource = + labelResourceService.getPersistenceResource(rMLabelContainer.getEngineInstanceLabel) + if (persistenceResource == null) { + // persistenceResource is null, engine already been stopped + logger.info( + s"DefaultEMEngineService stopEngine persistenceResource is null, engine: ${engineStopRequest.getServiceInstance} have already been stopped." + ) + return + } + + engineStopRequest.setEngineType(rMLabelContainer.getEngineTypeLabel.getEngineType) + engineStopRequest.setLogDirSuffix( + ECPathUtils + .getECLogDirSuffix( + rMLabelContainer.getEngineTypeLabel, + rMLabelContainer.getUserCreatorLabel, + persistenceResource.getTicketId + ) + ) + } + emNodeManager.stopEngine(engineStopRequest, emNode) - // engineNodeManager.deleteEngineNode(engineNode) logger.info( s"EM ${emNode.getServiceInstance} finished to stop Engine ${engineNode.getServiceInstance}" ) @@ -116,7 +182,11 @@ class DefaultEMEngineService extends EMEngineService with Logging { val filterInstanceAndLabel = if (emInstanceLabelOption.isDefined) { val emInstanceLabel = emInstanceLabelOption.get.asInstanceOf[EMInstanceLabel] logger.info(s"use emInstanceLabel , will be route to ${emInstanceLabel.getServiceInstance}") - if (!instanceAndLabels.asScala.exists(_._1.equals(emInstanceLabel.getServiceInstance))) { + if ( + !instanceAndLabels.asScala.exists( + _._1.getServiceInstance.equals(emInstanceLabel.getServiceInstance) + ) + ) { throw new AMErrorException( AMConstant.EM_ERROR_CODE, s"You specified em ${emInstanceLabel.getServiceInstance}, but the corresponding EM does not exist in the Manager" diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/DefaultEMUnregisterService.scala 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/DefaultEMUnregisterService.scala index b54815092f..cdcc670fdd 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/DefaultEMUnregisterService.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/em/DefaultEMUnregisterService.scala @@ -58,11 +58,6 @@ class DefaultEMUnregisterService extends EMUnregisterService with Logging { val emClearRequest = new EMInfoClearRequest emClearRequest.setEm(node) emClearRequest.setUser(stopEMRequest.getUser) - Utils.tryAndWarn(rmMessageService.dealWithStopEMRequest(stopEMRequest)) - // clear Label - val instanceLabelRemoveRequest = new NodeLabelRemoveRequest(node.getServiceInstance, false) - Utils.tryAndWarn(nodeLabelRemoveService.removeNodeLabel(instanceLabelRemoveRequest)) - // 此处需要先清理ECM再等待,避免ECM重启过快,导致ECM资源没清理干净 clearEMInstanceInfo(emClearRequest) logger.info(s" user ${stopEMRequest.getUser} finished to stop em ${stopEMRequest.getEm}") } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineAskEngineService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineAskEngineService.scala index 4616cf85a1..6ffff3ea2f 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineAskEngineService.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineAskEngineService.scala @@ -19,20 +19,25 @@ package org.apache.linkis.manager.am.service.engine import 
org.apache.linkis.common.exception.LinkisRetryException import org.apache.linkis.common.utils.{Logging, Utils} -import org.apache.linkis.governance.common.utils.JobUtils +import org.apache.linkis.governance.common.utils.{JobUtils, LoggerUtils} import org.apache.linkis.manager.am.conf.AMConfiguration +import org.apache.linkis.manager.am.hook.{AskEngineConnHook, AskEngineConnHookContext} +import org.apache.linkis.manager.am.service.engine.EngineAskEngineService.getAsyncId import org.apache.linkis.manager.common.constant.AMConstant import org.apache.linkis.manager.common.protocol.engine._ import org.apache.linkis.manager.label.constant.LabelKeyConstant import org.apache.linkis.rpc.Sender import org.apache.linkis.rpc.message.annotation.Receiver +import org.apache.commons.lang3.StringUtils import org.apache.commons.lang3.exception.ExceptionUtils -import org.springframework.beans.factory.annotation.Autowired +import org.springframework.beans.factory.annotation.{Autowired, Qualifier} import org.springframework.stereotype.Service import java.net.SocketTimeoutException +import java.util +import java.util.concurrent.ConcurrentHashMap import java.util.concurrent.atomic.AtomicInteger import scala.concurrent._ @@ -55,9 +60,10 @@ class DefaultEngineAskEngineService @Autowired private var engineSwitchService: EngineSwitchService = _ - private val idCreator = new AtomicInteger() - - private val idPrefix = Sender.getThisServiceInstance.getInstance + @Autowired(required = false) + @Qualifier + /* The implementation class of hook must be annotated with @Qualifier to take effect(hook的实现类必须加上@Qualifier注解才能生效) */ + var hooksArray: Array[AskEngineConnHook] = _ private implicit val executor: ExecutionContextExecutorService = Utils.newCachedExecutionContext( @@ -67,7 +73,38 @@ class DefaultEngineAskEngineService @Receiver override def askEngine(engineAskRequest: EngineAskRequest, sender: Sender): Any = { + + if (hooksArray != null && hooksArray.size > 0) { + val ctx = new 
AskEngineConnHookContext(engineAskRequest, sender) + + /** Throwing exceptions in hook will block(hook中抛异常会阻断) */ + hooksArray.foreach(h => + Utils.tryCatch(h.doHook(ctx)) { t => + { + val engineAskAsyncId = getAsyncId + val retryFlag = t match { + case _: LinkisRetryException => true + case _: RetryableException => true + case _ => + ExceptionUtils.getRootCause(t) match { + case _: SocketTimeoutException => true + case _: TimeoutException => true + case _ => + false + } + } + return EngineCreateError( + engineAskAsyncId, + ExceptionUtils.getRootCauseMessage(t), + retryFlag + ) + } + } + ) + } + val taskId = JobUtils.getJobIdFromStringMap(engineAskRequest.getProperties) + LoggerUtils.setJobIdMDC(taskId) logger.info(s"received task: $taskId, engineAskRequest $engineAskRequest") if (!engineAskRequest.getLabels.containsKey(LabelKeyConstant.EXECUTE_ONCE_KEY)) { val engineReuseRequest = new EngineReuseRequest() @@ -91,16 +128,18 @@ class DefaultEngineAskEngineService logger.info( s"Finished to ask engine for task: $taskId user ${engineAskRequest.getUser} by reuse node $reuseNode" ) + LoggerUtils.removeJobIdMDC() return reuseNode } } val engineAskAsyncId = getAsyncId val createNodeThread = Future { + LoggerUtils.setJobIdMDC(taskId) logger.info( s"Task: $taskId start to async($engineAskAsyncId) createEngine, ${engineAskRequest.getCreateService}" ) - // 如果原来的labels含engineInstance ,先去掉 + // If the original labels contain engineInstance, remove it first (如果原来的labels含engineInstance ,先去掉) engineAskRequest.getLabels.remove("engineInstance") val engineCreateRequest = new EngineCreateRequest engineCreateRequest.setLabels(engineAskRequest.getLabels) @@ -108,30 +147,44 @@ class DefaultEngineAskEngineService engineCreateRequest.setUser(engineAskRequest.getUser) engineCreateRequest.setProperties(engineAskRequest.getProperties) engineCreateRequest.setCreateService(engineAskRequest.getCreateService) - val createNode = engineCreateService.createEngine(engineCreateRequest, sender) - val 
timeout = - if (engineCreateRequest.getTimeout <= 0) { - AMConfiguration.ENGINE_START_MAX_TIME.getValue.toLong - } else engineCreateRequest.getTimeout - // useEngine 需要加上超时 - val createEngineNode = getEngineNodeManager.useEngine(createNode, timeout) - if (null == createEngineNode) { - throw new LinkisRetryException( - AMConstant.EM_ERROR_CODE, - s"create engine${createNode.getServiceInstance} success, but to use engine failed" + Utils.tryFinally { + val createNode = engineCreateService.createEngine(engineCreateRequest, sender) + val timeout = + if (engineCreateRequest.getTimeout <= 0) { + AMConfiguration.ENGINE_START_MAX_TIME.getValue.toLong + } else engineCreateRequest.getTimeout + // UseEngine requires a timeout (useEngine 需要加上超时) + val createEngineNode = getEngineNodeManager.useEngine(createNode, timeout) + if (null == createEngineNode) { + throw new LinkisRetryException( + AMConstant.EM_ERROR_CODE, + s"create engine${createNode.getServiceInstance} success, but to use engine failed" + ) + } + logger.info( + s"Task: $taskId finished to ask engine for user ${engineAskRequest.getUser} by create node $createEngineNode" ) + createEngineNode + } { + LoggerUtils.removeJobIdMDC() } - logger.info( - s"Task: $taskId finished to ask engine for user ${engineAskRequest.getUser} by create node $createEngineNode" - ) - createEngineNode } createNodeThread.onComplete { case Success(engineNode) => - logger.info(s"Task: $taskId Success to async($engineAskAsyncId) createEngine $engineNode") - sender.send(EngineCreateSuccess(engineAskAsyncId, engineNode)) + LoggerUtils.setJobIdMDC(taskId) + Utils.tryFinally { + logger.info(s"Task: $taskId Success to async($engineAskAsyncId) createEngine $engineNode") + if (null != sender) { + sender.send(EngineCreateSuccess(engineAskAsyncId, engineNode)) + } else { + logger.info("Will not send async using null sender.") + } + } { + LoggerUtils.removeJobIdMDC() + } case Failure(exception) => + LoggerUtils.setJobIdMDC(taskId) val retryFlag =
exception match { case retryException: LinkisRetryException => true case retryableException: RetryableException => true @@ -151,20 +204,20 @@ class DefaultEngineAskEngineService logger.info(s"msg: ${msg} canRetry Exception: ${exception.getClass.getName}") } - sender.send( - EngineCreateError( - engineAskAsyncId, - ExceptionUtils.getRootCauseMessage(exception), - retryFlag + Utils.tryFinally { + sender.send( + EngineCreateError( + engineAskAsyncId, + ExceptionUtils.getRootCauseMessage(exception), + retryFlag + ) ) - ) + } { + LoggerUtils.removeJobIdMDC() + } } - + LoggerUtils.removeJobIdMDC() EngineAskAsyncResponse(engineAskAsyncId, Sender.getThisServiceInstance) } - private def getAsyncId: String = { - idPrefix + "_" + idCreator.getAndIncrement() - } - } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineConnStatusCallbackService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineConnStatusCallbackService.scala deleted file mode 100644 index ccfcb7aa28..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineConnStatusCallbackService.scala +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.manager.am.service.engine - -import org.apache.linkis.common.utils.Logging -import org.apache.linkis.manager.am.conf.AMConfiguration -import org.apache.linkis.manager.common.constant.AMConstant -import org.apache.linkis.manager.common.entity.metrics.AMNodeMetrics -import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallbackToAM -import org.apache.linkis.manager.persistence.NodeMetricManagerPersistence -import org.apache.linkis.manager.service.common.metrics.MetricsConverter -import org.apache.linkis.rpc.message.annotation.Receiver -import org.apache.linkis.server.BDPJettyServerHelper - -import org.apache.commons.lang3.StringUtils - -import org.springframework.beans.factory.annotation.Autowired -import org.springframework.stereotype.Service - -import java.util -import java.util.Locale - -@Service -class DefaultEngineConnStatusCallbackService extends EngineConnStatusCallbackService with Logging { - - @Autowired - private var nodeMetricManagerPersistence: NodeMetricManagerPersistence = _ - - @Autowired - private var metricsConverter: MetricsConverter = _ - - private val canRetryLogs = AMConfiguration.AM_CAN_RETRY_LOGS.getValue.split(";") - - // The heartBeatMsg field is of type text, mysql text max byte num is 65535 - private val initErrorMsgMaxByteNum = 60000 - - @Receiver - override def dealEngineConnStatusCallback( - engineConnStatusCallbackToAM: EngineConnStatusCallbackToAM - ): Unit = { - - if (null == engineConnStatusCallbackToAM.serviceInstance) { - logger.warn(s"call back service instance is 
null") - } - logger.info(s"Start to deal engineConnStatusCallbackToAM $engineConnStatusCallbackToAM") - val nodeMetrics = new AMNodeMetrics - val heartBeatMsg: java.util.Map[String, Any] = new util.HashMap[String, Any]() - - var initErrorMsg = engineConnStatusCallbackToAM.initErrorMsg - if ( - StringUtils.isNotBlank(initErrorMsg) && initErrorMsg - .getBytes("utf-8") - .length >= initErrorMsgMaxByteNum - ) { - initErrorMsg = initErrorMsg.substring(0, initErrorMsgMaxByteNum) - } - heartBeatMsg.put(AMConstant.START_REASON, initErrorMsg) - - if (engineConnStatusCallbackToAM.canRetry) { - heartBeatMsg.put(AMConstant.EC_CAN_RETRY, engineConnStatusCallbackToAM.canRetry) - } else if (matchRetryLog(engineConnStatusCallbackToAM.initErrorMsg)) { - logger.info(s"match canRetry log ${engineConnStatusCallbackToAM.serviceInstance}") - heartBeatMsg.put(AMConstant.EC_CAN_RETRY, engineConnStatusCallbackToAM.canRetry) - } - - nodeMetrics.setHeartBeatMsg(BDPJettyServerHelper.jacksonJson.writeValueAsString(heartBeatMsg)) - nodeMetrics.setServiceInstance(engineConnStatusCallbackToAM.serviceInstance) - nodeMetrics.setStatus(metricsConverter.convertStatus(engineConnStatusCallbackToAM.status)) - - nodeMetricManagerPersistence.addOrupdateNodeMetrics(nodeMetrics) - logger.info(s"Finished to deal engineConnStatusCallbackToAM $engineConnStatusCallbackToAM") - - } - - private def matchRetryLog(errorMsg: String): Boolean = { - var flag = false - if (StringUtils.isNotBlank(errorMsg)) { - val errorMsgLowCase = errorMsg.toLowerCase(Locale.getDefault) - canRetryLogs.foreach(canRetry => - if (errorMsgLowCase.contains(canRetry)) { - logger.info(s"match engineConn log fatal logs,is $canRetry") - flag = true - } - ) - } - flag - } - -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineCreateService.scala 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineCreateService.scala index d6ceccef96..11f622e9fe 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineCreateService.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineCreateService.scala @@ -18,6 +18,7 @@ package org.apache.linkis.manager.am.service.engine import org.apache.linkis.common.ServiceInstance +import org.apache.linkis.common.conf.CommonVars import org.apache.linkis.common.exception.LinkisRetryException import org.apache.linkis.common.utils.{ByteTimeUtils, Logging, Utils} import org.apache.linkis.engineplugin.server.service.EngineConnResourceFactoryService @@ -291,6 +292,7 @@ class DefaultEngineCreateService } }) } + val timeoutEngineResourceRequest = TimeoutEngineResourceRequest( timeout, engineCreateRequest.getUser, @@ -303,13 +305,14 @@ class DefaultEngineCreateService resourceManager.requestResource( LabelUtils.distinctLabel(labelList, emNode.getLabels), resource, + engineCreateRequest, timeout ) match { case AvailableResource(ticketId) => (ticketId, resource) case NotEnoughResource(reason) => - logger.warn(s"not engough resource: $reason") - throw new LinkisRetryException(AMConstant.EM_ERROR_CODE, s"not engough resource: : $reason") + logger.warn(s"not enough resource: $reason") + throw new LinkisRetryException(AMConstant.EM_ERROR_CODE, s"not enough resource: : $reason") } } @@ -327,7 +330,8 @@ class DefaultEngineCreateService if (null == engineNodeInfo) return false if (NodeStatus.isCompleted(engineNodeInfo.getNodeStatus)) { val metrics = nodeMetricManagerPersistence.getNodeMetrics(engineNodeInfo) - val (reason, canRetry) = getStartErrorInfo(metrics.getHeartBeatMsg) + val msg = if (metrics != null) 
metrics.getHeartBeatMsg else null + val (reason, canRetry) = getStartErrorInfo(msg) if (canRetry.isDefined) { throw new LinkisRetryException( AMConstant.ENGINE_ERROR_CODE, diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineStopService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineStopService.scala index 801114c4bf..3267b698d7 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineStopService.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/DefaultEngineStopService.scala @@ -94,13 +94,17 @@ class DefaultEngineStopService extends AbstractEngineService with EngineStopServ logger.info(s" engineConn does not exist in db: $engineStopRequest ") return } + + val labels = nodeLabelService.getNodeLabels(engineStopRequest.getServiceInstance) + node.setLabels(labels) + // 1. 
request em to kill ec logger.info(s"Start to kill engine invoke enginePointer ${node.getServiceInstance}") Utils.tryAndErrorMsg { getEMService().stopEngine(node, node.getEMNode) logger.info(s"Finished to kill engine invoke enginePointer ${node.getServiceInstance}") }(s"Failed to stop engine ${node.getServiceInstance}") - node.setLabels(nodeLabelService.getNodeLabels(engineStopRequest.getServiceInstance)) + if (null == node.getNodeStatus) { node.setNodeStatus(NodeStatus.ShuttingDown) } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineAskEngineService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineAskEngineService.scala index 43b1f677b3..2c21bf851f 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineAskEngineService.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineAskEngineService.scala @@ -17,11 +17,24 @@ package org.apache.linkis.manager.am.service.engine -import org.apache.linkis.manager.common.protocol.engine.EngineAskRequest +import org.apache.linkis.manager.common.protocol.engine.{EngineAskRequest, EngineAsyncResponse} import org.apache.linkis.rpc.Sender +import java.util.concurrent.atomic.AtomicInteger + trait EngineAskEngineService { def askEngine(engineAskRequest: EngineAskRequest, sender: Sender): Any } + +object EngineAskEngineService { + private val idCreator = new AtomicInteger() + + private val idPrefix = Sender.getThisServiceInstance.getInstance + + def getAsyncId: String = { + idPrefix + "_" + idCreator.getAndIncrement() + } + +} diff --git 
a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineConnStatusCallbackService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineConnStatusCallbackService.scala deleted file mode 100644 index b4e498155c..0000000000 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineConnStatusCallbackService.scala +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -package org.apache.linkis.manager.am.service.engine - -import org.apache.linkis.manager.common.protocol.engine.EngineConnStatusCallbackToAM - -trait EngineConnStatusCallbackService { - - def dealEngineConnStatusCallback(engineConnStatusCallbackToAM: EngineConnStatusCallbackToAM): Unit - -} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineCreateService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineCreateService.scala index 8e90e222c9..c1efd0f964 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineCreateService.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineCreateService.scala @@ -17,7 +17,6 @@ package org.apache.linkis.manager.am.service.engine -import org.apache.linkis.common.ServiceInstance import org.apache.linkis.common.exception.LinkisRetryException import org.apache.linkis.manager.common.entity.node.EngineNode import org.apache.linkis.manager.common.protocol.engine.EngineCreateRequest diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineStopService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineStopService.scala index 228f3e0801..beca547906 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineStopService.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/engine/EngineStopService.scala @@ 
-17,7 +17,6 @@ package org.apache.linkis.manager.am.service.engine -import org.apache.linkis.manager.am.vo.ResourceVo import org.apache.linkis.manager.common.entity.node.EngineNode import org.apache.linkis.manager.common.protocol.engine.{ EngineConnReleaseRequest, @@ -26,10 +25,6 @@ import org.apache.linkis.manager.common.protocol.engine.{ } import org.apache.linkis.rpc.Sender -import java.util - -import scala.collection.mutable - trait EngineStopService { def stopEngine(engineStopRequest: EngineStopRequest, sender: Sender): Unit diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/heartbeat/AMHeartbeatService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/heartbeat/AMHeartbeatService.scala index 25db0b8303..45ab197adf 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/heartbeat/AMHeartbeatService.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/am/service/heartbeat/AMHeartbeatService.scala @@ -27,6 +27,7 @@ import org.apache.linkis.manager.common.protocol.node.NodeHeartbeatMsg import org.apache.linkis.manager.persistence.{NodeManagerPersistence, NodeMetricManagerPersistence} import org.apache.linkis.manager.service.common.metrics.MetricsConverter import org.apache.linkis.rpc.message.annotation.Receiver +import org.apache.linkis.server.toScalaBuffer import org.springframework.beans.factory.annotation.Autowired import org.springframework.stereotype.Service @@ -78,6 +79,7 @@ class AMHeartbeatService extends HeartbeatService with Logging { nodeMetrics.setStatus(0) } nodeMetricManagerPersistence.addOrupdateNodeMetrics(nodeMetrics) + logger.info(s"Finished to deal nodeHeartbeatMsg $nodeHeartbeatMsg") } } diff --git 
a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultNodeLabelService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultNodeLabelService.scala index 0ccd7fd06f..a8dbe44352 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultNodeLabelService.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/label/service/impl/DefaultNodeLabelService.scala @@ -26,7 +26,7 @@ import org.apache.linkis.manager.label.LabelManagerUtils import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext import org.apache.linkis.manager.label.conf.LabelManagerConf import org.apache.linkis.manager.label.entity.{Feature, Label} -import org.apache.linkis.manager.label.score.NodeLabelScorer +import org.apache.linkis.manager.label.score.{LabelScoreServiceInstance, NodeLabelScorer} import org.apache.linkis.manager.label.service.NodeLabelService import org.apache.linkis.manager.label.utils.LabelUtils import org.apache.linkis.manager.persistence.LabelManagerPersistence @@ -312,6 +312,8 @@ class DefaultNodeLabelService extends NodeLabelService with Logging { instanceLabels.keys } + val matchInstanceAndLabels = new util.HashMap[ScoreServiceInstance, util.List[Label[_]]]() + // Get the out-degree relations ( Node -> Label ) val outNodeDegree = labelManagerPersistence.getLabelRelationsByServiceInstance(instances.toList.asJava) @@ -322,12 +324,15 @@ class DefaultNodeLabelService extends NodeLabelService with Logging { else { necessaryLabels.asScala.map(_.getLabelKey).toSet } - // Rebuild in-degree relations - inNodeDegree.clear() - val removeNodes = new ArrayBuffer[ServiceInstance]() - outNodeDegree.asScala.foreach { case (node, iLabels) 
=> - // The core tag must be exactly the same - if (null != necessaryLabels) { + if (null == necessaryLabels || necessaryLabels.isEmpty) { + outNodeDegree.asScala.foreach { case (node, iLabels) => + matchInstanceAndLabels.put( + new LabelScoreServiceInstance(node), + iLabels.asInstanceOf[util.List[Label[_]]] + ) + } + } else { + outNodeDegree.asScala.foreach { case (node, iLabels) => val coreLabelKeys = iLabels.asScala .map(ManagerUtils.persistenceLabelToRealLabel) .filter(_.getFeature == Feature.CORE) @@ -338,33 +343,21 @@ class DefaultNodeLabelService extends NodeLabelService with Logging { coreLabelKeys.asJava ) && coreLabelKeys.size == necessaryLabelKeys.size ) { - iLabels.asScala.foreach(label => { - if (!inNodeDegree.asScala.contains(label)) { - val inNodes = new util.ArrayList[ServiceInstance]() - inNodeDegree.put(label, inNodes) - } - val inNodes = inNodeDegree.get(label) - inNodes.add(node) - }) - } else { - removeNodes += node + matchInstanceAndLabels.put( + new LabelScoreServiceInstance(node), + iLabels.asInstanceOf[util.List[Label[_]]] + ) } } } - - // Remove nodes with mismatched labels - if (removeNodes.nonEmpty && removeNodes.size == outNodeDegree.size()) { - logger.info( - s"The entered labels${necessaryLabels} do not match the labels of the node itself" - ) - } - - removeNodes.foreach(outNodeDegree.remove(_)) - return nodeLabelScorer - .calculate(inNodeDegree, outNodeDegree, labels) - .asInstanceOf[util.Map[ScoreServiceInstance, util.List[Label[_]]]] } - new util.HashMap[ScoreServiceInstance, util.List[Label[_]]]() + // Log a hint when no instance matched the requested labels + if (matchInstanceAndLabels.isEmpty) { + logger.info( + s"The entered labels${necessaryLabels} do not match the labels of the node itself" + ) + } + matchInstanceAndLabels } private def tryToAddLabel(persistenceLabel: PersistenceLabel): Int = { diff --git
a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/external/yarn/YarnResourceRequester.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/external/yarn/YarnResourceRequester.scala index d7b3349cc7..8891c98935 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/external/yarn/YarnResourceRequester.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/external/yarn/YarnResourceRequester.scala @@ -187,27 +187,40 @@ class YarnResourceRequester extends ExternalResourceRequester with Logging { realQueueName = queueName val childQueues = getChildQueuesOfCapacity(resp \ "scheduler" \ "schedulerInfo") val queue = getQueueOfCapacity(childQueues) - if (queue.isEmpty) { - logger.debug(s"cannot find any information about queue $queueName, response: " + resp) - throw new RMWarnException( - YARN_NOT_EXISTS_QUEUE.getErrorCode, - MessageFormat.format(YARN_NOT_EXISTS_QUEUE.getErrorDesc, queueName) - ) + val queueOption = Option(queue) match { + case Some(queue) => queue + case None => + logger.debug(s"cannot find any information about queue $queueName, response: " + resp) + throw new RMWarnException( + YARN_NOT_EXISTS_QUEUE.getErrorCode, + MessageFormat.format(YARN_NOT_EXISTS_QUEUE.getErrorDesc, queueName) + ) } - (maxEffectiveHandle(queue).get, getYarnResource(queue.map(_ \ "resourcesUsed")).get) + val queueInfo = queueOption.get.asInstanceOf[JObject] + ( + maxEffectiveHandle(queue).get, + getYarnResource(queue.map(_ \ "resourcesUsed")).get, + (queueInfo \ "maxApps").asInstanceOf[JInt].values.toInt, + (queueInfo \ "numPendingApps").asInstanceOf[JInt].values.toInt, + (queueInfo \ "numActiveApps").asInstanceOf[JInt].values.toInt + ) } else if ("fairScheduler".equals(schedulerType)) { val childQueues 
= getChildQueues(resp \ "scheduler" \ "schedulerInfo" \ "rootQueue") val queue = getQueue(childQueues) - if (queue.isEmpty) { + if (queue.isEmpty || queue.get == null) { logger.debug(s"cannot find any information about queue $queueName, response: " + resp) throw new RMWarnException( YARN_NOT_EXISTS_QUEUE.getErrorCode, MessageFormat.format(YARN_NOT_EXISTS_QUEUE.getErrorDesc, queueName) ) } + val queueInfo = queue.get.asInstanceOf[JObject] ( getYarnResource(queue.map(_ \ "maxResources")).get, - getYarnResource(queue.map(_ \ "usedResources")).get + getYarnResource(queue.map(_ \ "usedResources")).get, + (queueInfo \ "maxApps").asInstanceOf[JInt].values.toInt, + (queueInfo \ "numPendingApps").asInstanceOf[JInt].values.toInt, + (queueInfo \ "numActiveApps").asInstanceOf[JInt].values.toInt ) } else { logger.debug( @@ -225,6 +238,9 @@ class YarnResourceRequester extends ExternalResourceRequester with Logging { val nodeResource = new CommonNodeResource nodeResource.setMaxResource(yarnResource._1) nodeResource.setUsedResource(yarnResource._2) + nodeResource.setMaxApps(yarnResource._3) + nodeResource.setNumPendingApps(yarnResource._4) + nodeResource.setNumActiveApps(yarnResource._5) nodeResource }(t => { throw new RMErrorException( diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/restful/RMMonitorRest.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/restful/RMMonitorRest.scala index 8c59f1b17a..b50321e026 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/restful/RMMonitorRest.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/restful/RMMonitorRest.scala @@ -18,6 +18,7 @@ package org.apache.linkis.manager.rm.restful import org.apache.linkis.common.conf.Configuration 
+import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.manager.common.conf.RMConfiguration import org.apache.linkis.manager.common.entity.enumeration.NodeStatus @@ -25,7 +26,6 @@ import org.apache.linkis.manager.common.entity.node.EngineNode import org.apache.linkis.manager.common.entity.resource._ import org.apache.linkis.manager.common.errorcode.ManagerCommonErrorCodeSummary._ import org.apache.linkis.manager.common.exception.RMErrorException -import org.apache.linkis.manager.common.serializer.NodeResourceSerializer import org.apache.linkis.manager.common.utils.ResourceUtils import org.apache.linkis.manager.label.builder.CombinedLabelBuilder import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext @@ -54,7 +54,6 @@ import org.apache.linkis.server.{toScalaBuffer, BDPJettyServerHelper, Message} import org.apache.linkis.server.security.SecurityFilter import org.apache.linkis.server.utils.ModuleUserUtils -import org.apache.commons.collections4.ListUtils import org.apache.commons.lang3.StringUtils import org.springframework.beans.factory.annotation.Autowired @@ -71,19 +70,18 @@ import scala.collection.mutable import scala.collection.mutable.ArrayBuffer import com.fasterxml.jackson.databind.ObjectMapper +import com.fasterxml.jackson.module.scala.DefaultScalaModule import com.github.pagehelper.page.PageMethod import com.google.common.collect.Lists -import io.swagger.annotations.{Api, ApiImplicitParams, ApiModel, ApiOperation} -import org.json4s.DefaultFormats -import org.json4s.jackson.Serialization.write +import io.swagger.annotations.{Api, ApiOperation} @RestController @Api(tags = Array("resource management")) @RequestMapping(path = Array("/linkisManager/rm")) class RMMonitorRest extends Logging { - implicit val formats = DefaultFormats + ResourceSerializer + NodeResourceSerializer val mapper = new ObjectMapper() + mapper.registerModule(DefaultScalaModule) private val 
dateFormatLocal = new ThreadLocal[SimpleDateFormat]() { override protected def initialValue = new SimpleDateFormat("EEE MMM dd HH:mm:ss z yyyy") @@ -126,8 +124,11 @@ class RMMonitorRest extends Logging { var COMBINED_USERCREATOR_ENGINETYPE: String = _ - def appendMessageData(message: Message, key: String, value: AnyRef): Message = - message.data(key, mapper.readTree(write(value))) + def appendMessageData(message: Message, key: String, value: AnyRef): Message = { + val result = mapper.writeValueAsString(value) + logger.info(s"appendMessageData result: $result") + message.data(key, mapper.readTree(result)) + } @ApiOperation(value = "getApplicationList", notes = "get applicationList") @RequestMapping(path = Array("applicationlist"), method = Array(RequestMethod.POST)) @@ -143,92 +144,10 @@ class RMMonitorRest extends Logging { val engineType = if (param.get("engineType") == null) null else param.get("engineType").asInstanceOf[String] val nodes = getEngineNodes(userName, true) - val creatorToApplicationList = new mutable.HashMap[String, mutable.HashMap[String, Any]] - nodes.foreach { node => - val userCreatorLabel = node.getLabels.asScala - .find(_.isInstanceOf[UserCreatorLabel]) - .get - .asInstanceOf[UserCreatorLabel] - val engineTypeLabel = node.getLabels.asScala - .find(_.isInstanceOf[EngineTypeLabel]) - .get - .asInstanceOf[EngineTypeLabel] - if (getUserCreator(userCreatorLabel).equals(userCreator)) { - if (engineType == null || getEngineType(engineTypeLabel).equals(engineType)) { - if (!creatorToApplicationList.contains(userCreatorLabel.getCreator)) { - val applicationList = new mutable.HashMap[String, Any] - applicationList.put("engineInstances", new mutable.ArrayBuffer[Any]) - applicationList.put("usedResource", Resource.initResource(ResourceType.LoadInstance)) - applicationList.put("maxResource", Resource.initResource(ResourceType.LoadInstance)) - applicationList.put("minResource", Resource.initResource(ResourceType.LoadInstance)) - 
applicationList.put("lockedResource", Resource.initResource(ResourceType.LoadInstance)) - creatorToApplicationList.put(userCreatorLabel.getCreator, applicationList) - } - val applicationList = creatorToApplicationList(userCreatorLabel.getCreator) - applicationList.put( - "usedResource", - (if (applicationList("usedResource") == null) { - Resource.initResource(ResourceType.LoadInstance) - } else { - applicationList("usedResource") - .asInstanceOf[Resource] - }) + node.getNodeResource.getUsedResource - ) - applicationList.put( - "maxResource", - (if (applicationList("maxResource") == null) { - Resource.initResource(ResourceType.LoadInstance) - } else { - applicationList("maxResource") - .asInstanceOf[Resource] - }) + node.getNodeResource.getMaxResource - ) - applicationList.put( - "minResource", - (if (applicationList("minResource") == null) { - Resource.initResource(ResourceType.LoadInstance) - } else { - applicationList("minResource") - .asInstanceOf[Resource] - }) + node.getNodeResource.getMinResource - ) - applicationList.put( - "lockedResource", - (if (applicationList("lockedResource") == null) { - Resource.initResource(ResourceType.LoadInstance) - } else { - applicationList("lockedResource") - .asInstanceOf[Resource] - }) + node.getNodeResource.getLockedResource - ) - val engineInstance = new mutable.HashMap[String, Any] - engineInstance.put("creator", userCreatorLabel.getCreator) - engineInstance.put("engineType", engineTypeLabel.getEngineType) - engineInstance.put("instance", node.getServiceInstance.getInstance) - engineInstance.put("label", engineTypeLabel.getStringValue) - node.setNodeResource( - ResourceUtils.convertTo(node.getNodeResource, ResourceType.LoadInstance) - ) - engineInstance.put("resource", node.getNodeResource) - if (node.getNodeStatus == null) { - engineInstance.put("status", "Busy") - } else { - engineInstance.put("status", node.getNodeStatus.toString) - } - engineInstance.put("startTime", dateFormatLocal.get().format(node.getStartTime)) - 
engineInstance.put("owner", node.getOwner) - applicationList("engineInstances") - .asInstanceOf[mutable.ArrayBuffer[Any]] - .append(engineInstance) - } - } - } - val applications = creatorToApplicationList.map { creatorEntry => - val application = new mutable.HashMap[String, Any] - application.put("creator", creatorEntry._1) - application.put("applicationList", creatorEntry._2) - application - } + + val creatorToApplicationList = getCreatorToApplicationList(userCreator, engineType, nodes) + + val applications = getApplications(creatorToApplicationList) appendMessageData(message, "applications", applications) message } @@ -338,112 +257,12 @@ class RMMonitorRest extends Logging { node.getLabels.asScala.find(_.isInstanceOf[EngineTypeLabel]).get != null }) } + val userCreatorEngineTypeResourceMap = - new mutable.HashMap[String, mutable.HashMap[String, NodeResource]] - nodes.foreach { node => - val userCreatorLabel = node.getLabels.asScala - .find(_.isInstanceOf[UserCreatorLabel]) - .get - .asInstanceOf[UserCreatorLabel] - val engineTypeLabel = node.getLabels.asScala - .find(_.isInstanceOf[EngineTypeLabel]) - .get - .asInstanceOf[EngineTypeLabel] - val userCreator = getUserCreator(userCreatorLabel) - if (!userCreatorEngineTypeResourceMap.contains(userCreator)) { - userCreatorEngineTypeResourceMap.put(userCreator, new mutable.HashMap[String, NodeResource]) - } - val engineTypeResourceMap = userCreatorEngineTypeResourceMap.get(userCreator).get - val engineType = getEngineType(engineTypeLabel) - if (!engineTypeResourceMap.contains(engineType)) { - val nodeResource = CommonNodeResource.initNodeResource(ResourceType.LoadInstance) - engineTypeResourceMap.put(engineType, nodeResource) - } - val resource = engineTypeResourceMap.get(engineType).get - resource.setUsedResource(node.getNodeResource.getUsedResource + resource.getUsedResource) - // combined label - val combinedLabel = - combinedLabelBuilder.build("", Lists.newArrayList(userCreatorLabel, engineTypeLabel)); - var 
labelResource = labelResourceService.getLabelResource(combinedLabel) - if (labelResource == null) { - resource.setLeftResource(node.getNodeResource.getMaxResource - resource.getUsedResource) - } else { - labelResource = ResourceUtils.convertTo(labelResource, ResourceType.LoadInstance) - resource.setUsedResource(labelResource.getUsedResource) - resource.setLockedResource(labelResource.getLockedResource) - resource.setLeftResource(labelResource.getLeftResource) - resource.setMaxResource(labelResource.getMaxResource) - } - resource.getLeftResource match { - case dResource: DriverAndYarnResource => - resource.setLeftResource(dResource.loadInstanceResource) - case _ => - } - } - val userCreatorEngineTypeResources = userCreatorEngineTypeResourceMap.map { userCreatorEntry => - val userCreatorEngineTypeResource = new mutable.HashMap[String, Any] - userCreatorEngineTypeResource.put("userCreator", userCreatorEntry._1) - var totalUsedMemory: Long = 0L - var totalUsedCores: Int = 0 - var totalUsedInstances = 0 - var totalLockedMemory: Long = 0L - var totalLockedCores: Int = 0 - var totalLockedInstances: Int = 0 - var totalMaxMemory: Long = 0L - var totalMaxCores: Int = 0 - var totalMaxInstances: Int = 0 - val engineTypeResources = userCreatorEntry._2.map { engineTypeEntry => - val engineTypeResource = new mutable.HashMap[String, Any] - engineTypeResource.put("engineType", engineTypeEntry._1) - val engineResource = engineTypeEntry._2 - val usedResource = engineResource.getUsedResource.asInstanceOf[LoadInstanceResource] - val lockedResource = engineResource.getLockedResource.asInstanceOf[LoadInstanceResource] - val maxResource = engineResource.getMaxResource.asInstanceOf[LoadInstanceResource] - val usedMemory = usedResource.memory - val usedCores = usedResource.cores - val usedInstances = usedResource.instances - totalUsedMemory += usedMemory - totalUsedCores += usedCores - totalUsedInstances += usedInstances - val lockedMemory = lockedResource.memory - val lockedCores = 
lockedResource.cores - val lockedInstances = lockedResource.instances - totalLockedMemory += lockedMemory - totalLockedCores += lockedCores - totalLockedInstances += lockedInstances - val maxMemory = maxResource.memory - val maxCores = maxResource.cores - val maxInstances = maxResource.instances - totalMaxMemory += maxMemory - totalMaxCores += maxCores - totalMaxInstances += maxInstances + getUserCreatorEngineTypeResourceMap(nodes) + + val userCreatorEngineTypeResources = getUserResources(userCreatorEngineTypeResourceMap) - val memoryPercent = - if (maxMemory > 0) (usedMemory + lockedMemory) / maxMemory.toDouble else 0 - val coresPercent = - if (maxCores > 0) (usedCores + lockedCores) / maxCores.toDouble else 0 - val instancePercent = - if (maxInstances > 0) (usedInstances + lockedInstances) / maxInstances.toDouble else 0 - val maxPercent = Math.max(Math.max(memoryPercent, coresPercent), instancePercent) - engineTypeResource.put("percent", maxPercent.formatted("%.2f")) - engineTypeResource - } - val totalMemoryPercent = - if (totalMaxMemory > 0) (totalUsedMemory + totalLockedMemory) / totalMaxMemory.toDouble - else 0 - val totalCoresPercent = - if (totalMaxCores > 0) (totalUsedCores + totalLockedCores) / totalMaxCores.toDouble - else 0 - val totalInstancePercent = - if (totalMaxInstances > 0) { - (totalUsedInstances + totalLockedInstances) / totalMaxInstances.toDouble - } else 0 - val totalPercent = - Math.max(Math.max(totalMemoryPercent, totalCoresPercent), totalInstancePercent) - userCreatorEngineTypeResource.put("engineTypes", engineTypeResources) - userCreatorEngineTypeResource.put("percent", totalPercent.formatted("%.2f")) - userCreatorEngineTypeResource - } appendMessageData(message, "userResources", userCreatorEngineTypeResources) message } @@ -500,6 +319,7 @@ class RMMonitorRest extends Logging { request: HttpServletRequest, @RequestBody param: util.Map[String, AnyRef] ): Message = { + ModuleUserUtils.getOperationUser(request, "getQueueResource") val 
message = Message.ok("") val yarnIdentifier = new YarnResourceIdentifier(param.get("queuename").asInstanceOf[String]) val clusterLabel = labelFactory.createLabel(classOf[ClusterLabel]) @@ -532,6 +352,9 @@ class RMMonitorRest extends Logging { "usedPercentage", Map("memory" -> usedMemoryPercentage, "cores" -> usedCPUPercentage) ) + queueInfo.put("maxApps", providedYarnResource.getMaxApps) + queueInfo.put("numActiveApps", providedYarnResource.getNumActiveApps) + queueInfo.put("numPendingApps", providedYarnResource.getNumPendingApps) appendMessageData(message, "queueInfo", queueInfo) case _ => Message.error("Failed to get queue resource") } @@ -780,4 +603,239 @@ class RMMonitorRest extends Logging { .groupBy(_.getOwner) } + private def getUserResources( + userCreatorEngineTypeResourceMap: mutable.HashMap[ + String, + mutable.HashMap[String, NodeResource] + ] + ) = { + + val userCreatorEngineTypeResources = userCreatorEngineTypeResourceMap.map { userCreatorEntry => + val userCreatorEngineTypeResource = new mutable.HashMap[String, Any] + userCreatorEngineTypeResource.put("userCreator", userCreatorEntry._1) + var totalUsedMemory: Long = 0L + var totalUsedCores: Int = 0 + var totalUsedInstances = 0 + var totalLockedMemory: Long = 0L + var totalLockedCores: Int = 0 + var totalLockedInstances: Int = 0 + var totalMaxMemory: Long = 0L + var totalMaxCores: Int = 0 + var totalMaxInstances: Int = 0 + val engineTypeResources = userCreatorEntry._2.map { engineTypeEntry => + val engineTypeResource = new mutable.HashMap[String, Any] + engineTypeResource.put("engineType", engineTypeEntry._1) + val engineResource = engineTypeEntry._2 + val usedResource = engineResource.getUsedResource.asInstanceOf[LoadInstanceResource] + val lockedResource = engineResource.getLockedResource.asInstanceOf[LoadInstanceResource] + val maxResource = engineResource.getMaxResource.asInstanceOf[LoadInstanceResource] + val usedMemory = usedResource.memory + val usedCores = usedResource.cores + val 
usedInstances = usedResource.instances + totalUsedMemory += usedMemory + totalUsedCores += usedCores + totalUsedInstances += usedInstances + val lockedMemory = lockedResource.memory + val lockedCores = lockedResource.cores + val lockedInstances = lockedResource.instances + totalLockedMemory += lockedMemory + totalLockedCores += lockedCores + totalLockedInstances += lockedInstances + val maxMemory = maxResource.memory + val maxCores = maxResource.cores + val maxInstances = maxResource.instances + totalMaxMemory += maxMemory + totalMaxCores += maxCores + totalMaxInstances += maxInstances + + val memoryPercent = + if (maxMemory > 0) (usedMemory + lockedMemory) / maxMemory.toDouble else 0 + val coresPercent = + if (maxCores > 0) (usedCores + lockedCores) / maxCores.toDouble else 0 + val instancePercent = + if (maxInstances > 0) (usedInstances + lockedInstances) / maxInstances.toDouble else 0 + val maxPercent = Math.max(Math.max(memoryPercent, coresPercent), instancePercent) + engineTypeResource.put("percent", maxPercent.formatted("%.2f")) + engineTypeResource + } + val totalMemoryPercent = + if (totalMaxMemory > 0) (totalUsedMemory + totalLockedMemory) / totalMaxMemory.toDouble + else 0 + val totalCoresPercent = + if (totalMaxCores > 0) (totalUsedCores + totalLockedCores) / totalMaxCores.toDouble + else 0 + val totalInstancePercent = + if (totalMaxInstances > 0) { + (totalUsedInstances + totalLockedInstances) / totalMaxInstances.toDouble + } else 0 + val totalPercent = + Math.max(Math.max(totalMemoryPercent, totalCoresPercent), totalInstancePercent) + userCreatorEngineTypeResource.put("engineTypes", engineTypeResources) + userCreatorEngineTypeResource.put("percent", totalPercent.formatted("%.2f")) + userCreatorEngineTypeResource + } + userCreatorEngineTypeResources + } + + private def getUserCreatorEngineTypeResourceMap(nodes: Array[EngineNode]) = { + val userCreatorEngineTypeResourceMap = + new mutable.HashMap[String, mutable.HashMap[String, NodeResource]] + + for 
(node <- nodes) { + val userCreatorLabel = node.getLabels.asScala + .find(_.isInstanceOf[UserCreatorLabel]) + .get + .asInstanceOf[UserCreatorLabel] + val engineTypeLabel = node.getLabels.asScala + .find(_.isInstanceOf[EngineTypeLabel]) + .get + .asInstanceOf[EngineTypeLabel] + val userCreator = getUserCreator(userCreatorLabel) + + if (!userCreatorEngineTypeResourceMap.contains(userCreator)) { + userCreatorEngineTypeResourceMap.put(userCreator, new mutable.HashMap[String, NodeResource]) + } + val engineTypeResourceMap = userCreatorEngineTypeResourceMap.get(userCreator).get + val engineType = getEngineType(engineTypeLabel) + if (!engineTypeResourceMap.contains(engineType)) { + val nodeResource = CommonNodeResource.initNodeResource(ResourceType.LoadInstance) + engineTypeResourceMap.put(engineType, nodeResource) + } + val resource = engineTypeResourceMap.get(engineType).get + resource.setUsedResource(node.getNodeResource.getUsedResource + resource.getUsedResource) + // combined label + val combinedLabel = + combinedLabelBuilder.build("", Lists.newArrayList(userCreatorLabel, engineTypeLabel)); + var labelResource = labelResourceService.getLabelResource(combinedLabel) + if (labelResource == null) { + resource.setLeftResource(node.getNodeResource.getMaxResource - resource.getUsedResource) + } else { + labelResource = ResourceUtils.convertTo(labelResource, ResourceType.LoadInstance) + resource.setUsedResource(labelResource.getUsedResource) + resource.setLockedResource(labelResource.getLockedResource) + resource.setLeftResource(labelResource.getLeftResource) + resource.setMaxResource(labelResource.getMaxResource) + } + resource.getLeftResource match { + case dResource: DriverAndYarnResource => + resource.setLeftResource(dResource.loadInstanceResource) + case _ => + } + } + + userCreatorEngineTypeResourceMap + } + + private def getCreatorToApplicationList( + userCreator: String, + engineType: String, + nodes: Array[EngineNode] + ) = { + val creatorToApplicationList = new 
util.HashMap[String, util.HashMap[String, Any]] + nodes.foreach { node => + val userCreatorLabel = node.getLabels.asScala + .find(_.isInstanceOf[UserCreatorLabel]) + .get + .asInstanceOf[UserCreatorLabel] + val engineTypeLabel = node.getLabels.asScala + .find(_.isInstanceOf[EngineTypeLabel]) + .get + .asInstanceOf[EngineTypeLabel] + if (getUserCreator(userCreatorLabel).equals(userCreator)) { + if (engineType == null || getEngineType(engineTypeLabel).equals(engineType)) { + if (!creatorToApplicationList.containsKey(userCreatorLabel.getCreator)) { + val applicationList = new util.HashMap[String, Any] + applicationList.put("engineInstances", new util.ArrayList[Any]) + applicationList.put("usedResource", Resource.initResource(ResourceType.LoadInstance)) + applicationList.put("maxResource", Resource.initResource(ResourceType.LoadInstance)) + applicationList.put("minResource", Resource.initResource(ResourceType.LoadInstance)) + applicationList.put("lockedResource", Resource.initResource(ResourceType.LoadInstance)) + creatorToApplicationList.put(userCreatorLabel.getCreator, applicationList) + } + val applicationList = creatorToApplicationList.get(userCreatorLabel.getCreator) + applicationList.put( + "usedResource", + (if (applicationList.get("usedResource") == null) { + Resource.initResource(ResourceType.LoadInstance) + } else { + applicationList + .get("usedResource") + .asInstanceOf[Resource] + }) + node.getNodeResource.getUsedResource + ) + applicationList.put( + "maxResource", + (if (applicationList.get("maxResource") == null) { + Resource.initResource(ResourceType.LoadInstance) + } else { + applicationList + .get("maxResource") + .asInstanceOf[Resource] + }) + node.getNodeResource.getMaxResource + ) + applicationList.put( + "minResource", + (if (applicationList.get("minResource") == null) { + Resource.initResource(ResourceType.LoadInstance) + } else { + applicationList + .get("minResource") + .asInstanceOf[Resource] + }) + node.getNodeResource.getMinResource + ) + 
applicationList.put( + "lockedResource", + (if (applicationList.get("lockedResource") == null) { + Resource.initResource(ResourceType.LoadInstance) + } else { + applicationList + .get("lockedResource") + .asInstanceOf[Resource] + }) + node.getNodeResource.getLockedResource + ) + val engineInstance = new mutable.HashMap[String, Any] + engineInstance.put("creator", userCreatorLabel.getCreator) + engineInstance.put("engineType", engineTypeLabel.getEngineType) + engineInstance.put("instance", node.getServiceInstance.getInstance) + engineInstance.put("label", engineTypeLabel.getStringValue) + node.setNodeResource( + ResourceUtils.convertTo(node.getNodeResource, ResourceType.LoadInstance) + ) + engineInstance.put("resource", node.getNodeResource) + if (node.getNodeStatus == null) { + engineInstance.put("status", "Busy") + } else { + engineInstance.put("status", node.getNodeStatus.toString) + } + engineInstance.put( + "st" + + "artTime", + dateFormatLocal.get().format(node.getStartTime) + ) + engineInstance.put("owner", node.getOwner) + applicationList + .get("engineInstances") + .asInstanceOf[util.ArrayList[Any]] + .add(engineInstance) + } + } + } + creatorToApplicationList + } + + private def getApplications( + creatorToApplicationList: util.HashMap[String, util.HashMap[String, Any]] + ) = { + val applications = new util.ArrayList[util.HashMap[String, Any]]() + val iterator = creatorToApplicationList.entrySet().iterator(); + while (iterator.hasNext) { + val entry = iterator.next() + val application = new util.HashMap[String, Any] + application.put("creator", entry.getKey) + application.put("applicationList", entry.getValue) + applications.add(application) + } + applications + } + } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/RequestResourceService.scala 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/RequestResourceService.scala index 70fb1dd9f5..d990fd7045 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/RequestResourceService.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/RequestResourceService.scala @@ -22,6 +22,7 @@ import org.apache.linkis.manager.common.constant.RMConstant import org.apache.linkis.manager.common.entity.resource._ import org.apache.linkis.manager.common.errorcode.ManagerCommonErrorCodeSummary._ import org.apache.linkis.manager.common.exception.RMWarnException +import org.apache.linkis.manager.common.protocol.engine.{EngineAskRequest, EngineCreateRequest} import org.apache.linkis.manager.label.entity.em.EMInstanceLabel import org.apache.linkis.manager.rm.domain.RMLabelContainer import org.apache.linkis.manager.rm.exception.RMErrorCode @@ -35,7 +36,11 @@ abstract class RequestResourceService(labelResourceService: LabelResourceService val enableRequest = RMUtils.RM_REQUEST_ENABLE.getValue - def canRequest(labelContainer: RMLabelContainer, resource: NodeResource): Boolean = { + def canRequest( + labelContainer: RMLabelContainer, + resource: NodeResource, + engineCreateRequest: EngineCreateRequest + ): Boolean = { labelContainer.getCurrentLabel match { case emInstanceLabel: EMInstanceLabel => @@ -131,7 +136,10 @@ abstract class RequestResourceService(labelResourceService: LabelResourceService labelAvailableResource, labelResource.getMaxResource ) - throw new RMWarnException(notEnoughMessage._1, notEnoughMessage._2) + throw new RMWarnException( + notEnoughMessage._1, + notEnoughMessage._2 + s"ECM Instance:${emInstanceLabel.getInstance()}" + ) } logger.debug(s"Passed check: resource[${requestResource}] want to use em ${emInstanceLabel 
.getInstance()} available resource[${labelAvailableResource}]") diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/ResourceManager.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/ResourceManager.scala index 78e414d434..62babebf8e 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/ResourceManager.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/ResourceManager.scala @@ -20,6 +20,7 @@ package org.apache.linkis.manager.rm.service import org.apache.linkis.common.ServiceInstance import org.apache.linkis.manager.common.entity.node.EngineNode import org.apache.linkis.manager.common.entity.resource.NodeResource +import org.apache.linkis.manager.common.protocol.engine.{EngineAskRequest, EngineCreateRequest} import org.apache.linkis.manager.label.entity.Label import org.apache.linkis.manager.rm.{ResourceInfo, ResultResource} @@ -46,7 +47,11 @@ abstract class ResourceManager { * @param resource * @return */ - def requestResource(labels: util.List[Label[_]], resource: NodeResource): ResultResource + def requestResource( + labels: util.List[Label[_]], + resource: NodeResource, + engineCreateRequest: EngineCreateRequest + ): ResultResource /** * Request resources and wait for a certain amount of time until the requested resource is met @@ -60,6 +65,7 @@ abstract class ResourceManager { def requestResource( labels: util.List[Label[_]], resource: NodeResource, + engineCreateRequest: EngineCreateRequest, wait: Long ): ResultResource diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/DefaultResourceManager.scala 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/DefaultResourceManager.scala index f1284268b3..2b1702069d 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/DefaultResourceManager.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/DefaultResourceManager.scala @@ -33,6 +33,7 @@ import org.apache.linkis.manager.common.entity.persistence.{ import org.apache.linkis.manager.common.entity.resource._ import org.apache.linkis.manager.common.errorcode.ManagerCommonErrorCodeSummary import org.apache.linkis.manager.common.exception.{RMErrorException, RMWarnException} +import org.apache.linkis.manager.common.protocol.engine.{EngineAskRequest, EngineCreateRequest} import org.apache.linkis.manager.common.utils.{ManagerUtils, ResourceUtils} import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext import org.apache.linkis.manager.label.constant.LabelKeyConstant @@ -52,6 +53,7 @@ import org.apache.linkis.manager.rm.{ ResourceInfo, ResultResource } +import org.apache.linkis.manager.rm.domain.RMLabelContainer import org.apache.linkis.manager.rm.entity.{LabelResourceMapping, ResourceOperationType} import org.apache.linkis.manager.rm.entity.ResourceOperationType.{LOCK, USED} import org.apache.linkis.manager.rm.exception.{RMErrorCode, RMLockFailedRetryException} @@ -152,22 +154,40 @@ class DefaultResourceManager extends ResourceManager with Logging with Initializ eMInstanceLabel.setInstance(serviceInstance.getInstance) val emResource = labelResourceService.getLabelResource(eMInstanceLabel) + var registerResourceFlag = true if (emResource != null) { - logger.warn(s"${serviceInstance} has been registered, now update resource.") - if (!emResource.getResourceType.equals(resource.getResourceType)) { - throw 
new RMErrorException( - RMErrorCode.LABEL_DUPLICATED.getErrorCode, - MessageFormat.format( - RMErrorCode.LABEL_DUPLICATED.getErrorDesc, - serviceInstance, - emResource.getResourceType, - resource.getResourceType - ) + registerResourceFlag = false + logger.warn(s"${serviceInstance} has been registered, resource is ${emResource}.") + val leftResource = emResource.getLeftResource + if (leftResource != null && Resource.getZeroResource(leftResource) > leftResource) { + logger.warn( + s"${serviceInstance} has been registered, but left Resource <0 need to register resource." + ) + registerResourceFlag = true + } + val usedResource = emResource.getLockedResource + emResource.getUsedResource + if (usedResource > emResource.getMaxResource) { + logger.warn( + s"${serviceInstance} has been registered, but usedResource > MaxResource need to register resource." + ) + registerResourceFlag = true + } + + if (!(resource.getMaxResource == emResource.getMaxResource)) { + logger.warn( + s"${serviceInstance} has been registered, but inconsistent newly registered resources need to register resource." ) + registerResourceFlag = true } } + + if (!registerResourceFlag) { + logger.warn(s"${serviceInstance} has been registered, skip register resource.") + return + } val lock = tryLockOneLabel(eMInstanceLabel, -1, Utils.getJvmUser) try { + labelResourceService.removeResourceByLabel(eMInstanceLabel) labelResourceService.setLabelResource( eMInstanceLabel, resource, @@ -190,8 +210,8 @@ class DefaultResourceManager extends ResourceManager with Logging with Initializ } /** - * The registration method is mainly used to notify all RM nodes (including the node), and the - * instance is offline. 该注册方法,主要是用于通知所有的RM节点(包括本节点),下线该实例 + * The registration method is mainly used to notify all RM nodes , and the instance is offline. 
+ * 该注册方法,主要是用于通知所有的RM节点(包括本节点),下线该实例 */ override def unregister(serviceInstance: ServiceInstance): Unit = { @@ -199,7 +219,6 @@ class DefaultResourceManager extends ResourceManager with Logging with Initializ LabelBuilderFactoryContext.getLabelBuilderFactory.createLabel(classOf[EMInstanceLabel]) eMInstanceLabel.setServiceName(serviceInstance.getApplicationName) eMInstanceLabel.setInstance(serviceInstance.getInstance) - val ecNodes = nodeManagerPersistence.getEngineNodeByEM(serviceInstance).asScala val lock = tryLockOneLabel(eMInstanceLabel, -1, Utils.getJvmUser) try { labelResourceService.removeResourceByLabel(eMInstanceLabel) @@ -217,13 +236,6 @@ class DefaultResourceManager extends ResourceManager with Logging with Initializ resourceLockService.unLock(lock) logger.info(s"Finished to clear ecm resource:${serviceInstance}") } - ecNodes.foreach { engineNode => - Utils.tryAndWarn { - engineNode.setLabels(nodeLabelService.getNodeLabels(engineNode.getServiceInstance)) - engineNode.setNodeStatus(NodeStatus.Failed) - engineStopService.engineConnInfoClear(engineNode) - } - } logger.info(s"Finished to clear ec for ecm ${serviceInstance}") } @@ -236,9 +248,10 @@ class DefaultResourceManager extends ResourceManager with Logging with Initializ */ override def requestResource( labels: util.List[Label[_]], - resource: NodeResource + resource: NodeResource, + engineCreateRequest: EngineCreateRequest ): ResultResource = { - requestResource(labels, resource, -1) + requestResource(labels, resource, engineCreateRequest, -1) } /** @@ -253,6 +266,7 @@ class DefaultResourceManager extends ResourceManager with Logging with Initializ override def requestResource( labels: util.List[Label[_]], resource: NodeResource, + engineCreateRequest: EngineCreateRequest, wait: Long ): ResultResource = { val labelContainer = labelResourceService.enrichLabels(labels) @@ -268,7 +282,7 @@ class DefaultResourceManager extends ResourceManager with Logging with Initializ // check ecm resource if not enough 
return Utils.tryCatch { labelContainer.setCurrentLabel(emInstanceLabel) - if (!requestResourceService.canRequest(labelContainer, resource)) { + if (!requestResourceService.canRequest(labelContainer, resource, engineCreateRequest)) { return NotEnoughResource(s"Labels:${emInstanceLabel.getStringValue} not enough resource") } } { @@ -286,7 +300,7 @@ class DefaultResourceManager extends ResourceManager with Logging with Initializ ) Utils.tryCatch { labelContainer.setCurrentLabel(userCreatorEngineTypeLabel) - if (!requestResourceService.canRequest(labelContainer, resource)) { + if (!requestResourceService.canRequest(labelContainer, resource, engineCreateRequest)) { return NotEnoughResource( s"Labels:${userCreatorEngineTypeLabel.getStringValue} not enough resource" ) @@ -320,22 +334,19 @@ class DefaultResourceManager extends ResourceManager with Logging with Initializ } { persistenceLocks.foreach(resourceLockService.unLock) } - // record engine locked resource - val tickedId = UUID.randomUUID().toString - resourceLogService.recordUserResourceAction( - labelContainer, - tickedId, - ChangeType.ENGINE_REQUEST, - resource.getLockedResource - ) + + // add ec node + val tickedId = RMUtils.getECTicketID val emNode = new AMEMNode emNode.setServiceInstance(labelContainer.getEMInstanceLabel.getServiceInstance) val engineNode = new AMEngineNode engineNode.setEMNode(emNode) engineNode.setServiceInstance(ServiceInstance(labelContainer.getEngineServiceName, tickedId)) engineNode.setNodeResource(resource) + engineNode.setTicketId(tickedId) nodeManagerPersistence.addEngineNode(engineNode) + // add labels val engineInstanceLabel = LabelBuilderFactoryContext.getLabelBuilderFactory.createLabel(classOf[EngineInstanceLabel]) engineInstanceLabel.setServiceName(labelContainer.getEngineServiceName) @@ -343,11 +354,20 @@ class DefaultResourceManager extends ResourceManager with Logging with Initializ nodeLabelService.addLabelToNode(engineNode.getServiceInstance, engineInstanceLabel) + // add ec 
resource labelResourceService.setEngineConnLabelResource( engineInstanceLabel, resource, labelContainer.getCombinedUserCreatorEngineTypeLabel.getStringValue ) + // record engine locked resource + labelContainer.getLabels.add(engineInstanceLabel) + resourceLogService.recordUserResourceAction( + labelContainer, + tickedId, + ChangeType.ENGINE_REQUEST, + resource.getLockedResource + ) val persistenceLabel = labelFactory.convertLabel(engineInstanceLabel, classOf[PersistenceLabel]) val persistenceEngineLabel = labelManagerPersistence.getLabelByKeyValue( @@ -710,7 +730,7 @@ class DefaultResourceManager extends ResourceManager with Logging with Initializ var heartbeatMsgMetrics = "" Utils.tryAndWarn { val oldMetrics = nodeMetricManagerPersistence.getNodeMetrics(ecNode) - if (StringUtils.isNotBlank(oldMetrics.getHeartBeatMsg)) { + if (oldMetrics != null && StringUtils.isNotBlank(oldMetrics.getHeartBeatMsg)) { heartbeatMsgMetrics = oldMetrics.getHeartBeatMsg } } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/DriverAndYarnReqResourceService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/DriverAndYarnReqResourceService.scala index e82ff8383c..bebb34e00c 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/DriverAndYarnReqResourceService.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/DriverAndYarnReqResourceService.scala @@ -21,12 +21,16 @@ import org.apache.linkis.manager.common.constant.RMConstant import org.apache.linkis.manager.common.entity.resource._ import org.apache.linkis.manager.common.entity.resource.ResourceType.DriverAndYarn import org.apache.linkis.manager.common.exception.RMWarnException +import 
org.apache.linkis.manager.common.protocol.engine.{EngineAskRequest, EngineCreateRequest} +import org.apache.linkis.manager.label.entity.cluster.ClusterLabel import org.apache.linkis.manager.rm.domain.RMLabelContainer import org.apache.linkis.manager.rm.exception.RMErrorCode import org.apache.linkis.manager.rm.external.service.ExternalResourceService import org.apache.linkis.manager.rm.external.yarn.YarnResourceIdentifier import org.apache.linkis.manager.rm.service.{LabelResourceService, RequestResourceService} -import org.apache.linkis.manager.rm.utils.RMUtils +import org.apache.linkis.manager.rm.utils.{AcrossClusterRulesJudgeUtils, RMUtils} + +import org.apache.commons.lang3.StringUtils import org.json4s.DefaultFormats @@ -39,10 +43,15 @@ class DriverAndYarnReqResourceService( override val resourceType: ResourceType = DriverAndYarn - override def canRequest(labelContainer: RMLabelContainer, resource: NodeResource): Boolean = { - if (!super.canRequest(labelContainer, resource)) { + override def canRequest( + labelContainer: RMLabelContainer, + resource: NodeResource, + engineCreateRequest: EngineCreateRequest + ): Boolean = { + if (!super.canRequest(labelContainer, resource, engineCreateRequest)) { return false } + val requestedDriverAndYarnResource = resource.getMaxResource.asInstanceOf[DriverAndYarnResource] val requestedYarnResource = requestedDriverAndYarnResource.yarnResource @@ -65,7 +74,9 @@ class DriverAndYarnReqResourceService( val notEnoughMessage = generateQueueNotEnoughMessage(requestedYarnResource, queueLeftResource, maxCapacity) throw new RMWarnException(notEnoughMessage._1, notEnoughMessage._2) - } else true + } + + true } def generateQueueNotEnoughMessage( @@ -86,7 +97,8 @@ class DriverAndYarnReqResourceService( RMConstant.CPU_UNIT, yarn.queueCores, yarnAvailable.queueCores, - maxYarn.queueCores + maxYarn.queueCores, + yarn.queueName ) ) } else if (yarn.queueMemory > yarnAvailable.queueMemory) { @@ -98,7 +110,8 @@ class 
DriverAndYarnReqResourceService( RMConstant.MEMORY_UNIT_BYTE, yarn.queueMemory, yarnAvailable.queueMemory, - maxYarn.queueMemory + maxYarn.queueMemory, + yarn.queueName ) ) } else { @@ -110,7 +123,8 @@ class DriverAndYarnReqResourceService( RMConstant.INSTANCE_UNIT, yarn.queueInstances, yarnAvailable.queueInstances, - maxYarn.queueInstances + maxYarn.queueInstances, + yarn.queueName ) ) } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/ResourceLogService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/ResourceLogService.scala index e332854bc2..2d67edb9a7 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/ResourceLogService.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/service/impl/ResourceLogService.scala @@ -25,7 +25,11 @@ import org.apache.linkis.manager.common.entity.resource.Resource import org.apache.linkis.manager.dao.ECResourceRecordMapper import org.apache.linkis.manager.label.entity.CombinedLabel import org.apache.linkis.manager.label.entity.em.EMInstanceLabel -import org.apache.linkis.manager.label.entity.engine.EngineInstanceLabel +import org.apache.linkis.manager.label.entity.engine.{ + EngineInstanceLabel, + EngineTypeLabel, + UserCreatorLabel +} import org.apache.linkis.manager.rm.domain.RMLabelContainer import org.apache.linkis.manager.rm.utils.RMUtils @@ -150,7 +154,11 @@ class ResourceLogService extends Logging { if (null == userCreatorEngineType) return var ecResourceInfoRecord = ecResourceRecordMapper.getECResourceInfoRecord(ticketId) if (ecResourceInfoRecord == null) { - val logDirSuffix = getECLogDirSuffix(labelContainer, ticketId) + val logDirSuffix = ECPathUtils.getECLogDirSuffix( + 
labelContainer.getEngineTypeLabel, + labelContainer.getUserCreatorLabel, + ticketId + ) val user = if (null != labelContainer.getUserCreatorLabel) labelContainer.getUserCreatorLabel.getUser else "" @@ -200,20 +208,6 @@ class ResourceLogService extends Logging { ecResourceRecordMapper.updateECResourceInfoRecord(ecResourceInfoRecord) } - def getECLogDirSuffix(labelContainer: RMLabelContainer, ticketId: String): String = { - val engineTypeLabel = labelContainer.getEngineTypeLabel - val userCreatorLabel = labelContainer.getUserCreatorLabel - if (null == engineTypeLabel || null == userCreatorLabel) { - return "" - } - val suffix = ECPathUtils.getECWOrkDirPathSuffix( - userCreatorLabel.getUser, - ticketId, - engineTypeLabel.getEngineType - ) - suffix + File.separator + "logs" - } - } object ChangeType { diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/utils/AcrossClusterRulesJudgeUtils.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/utils/AcrossClusterRulesJudgeUtils.scala new file mode 100644 index 0000000000..51cf36bca1 --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/utils/AcrossClusterRulesJudgeUtils.scala @@ -0,0 +1,65 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.rm.utils + +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.manager.common.entity.resource.YarnResource + +object AcrossClusterRulesJudgeUtils extends Logging { + + def acrossClusterRuleJudge( + leftResource: YarnResource, + usedResource: YarnResource, + maxResource: YarnResource, + leftCPUThreshold: Int, + leftMemoryThreshold: Int, + UsedCPUPercentageThreshold: Double, + UsedMemoryPercentageThreshold: Double + ): Boolean = { + if (leftResource != null && usedResource != null && maxResource != null) { + val leftQueueMemory = leftResource.queueMemory / Math.pow(1024, 3).toLong + logger.info( + s"leftResource.queueCores: ${leftResource.queueCores}, leftCPUThreshold: $leftCPUThreshold," + + s"leftQueueMemory: $leftQueueMemory, leftMemoryThreshold: $leftMemoryThreshold" + ) + + if (leftResource.queueCores > leftCPUThreshold && leftQueueMemory > leftMemoryThreshold) { + + val usedCPUPercentage = + usedResource.queueCores.asInstanceOf[Double] / maxResource.queueCores + .asInstanceOf[Double] + val usedMemoryPercentage = usedResource.queueMemory + .asInstanceOf[Double] / maxResource.queueMemory.asInstanceOf[Double] + + logger.info( + s"usedCPUPercentage: $usedCPUPercentage, UsedCPUPercentageThreshold: $UsedCPUPercentageThreshold" + + s"usedMemoryPercentage: $usedMemoryPercentage, UsedMemoryPercentageThreshold: $UsedMemoryPercentageThreshold" + ) + + if ( + usedCPUPercentage < UsedCPUPercentageThreshold && usedMemoryPercentage < UsedMemoryPercentageThreshold + ) { + return true + } + } + } + + 
false + } + +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/utils/RMUtils.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/utils/RMUtils.scala index ca5f420693..2b74c31079 100644 --- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/utils/RMUtils.scala +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/org/apache/linkis/manager/rm/utils/RMUtils.scala @@ -28,7 +28,10 @@ import org.apache.linkis.manager.rm.conf.ResourceStatus import org.apache.linkis.manager.rm.restful.vo.UserResourceVo import org.apache.linkis.server.BDPJettyServerHelper +import org.apache.commons.lang3.StringUtils + import java.util +import java.util.UUID import scala.collection.JavaConverters.asScalaBufferConverter @@ -213,7 +216,8 @@ object RMUtils extends Logging { unitType: String, requestResource: Any, availableResource: Any, - maxResource: Any + maxResource: Any, + queueName: String = "" ): String = { def dealMemory(resourceType: String, unitType: String, resource: Any): String = { @@ -222,7 +226,7 @@ object RMUtils extends Logging { if (logger.isDebugEnabled()) { logger.debug(s"Will change ${resource.toString} from ${unitType} to GB") } - ByteTimeUtils.byteStringAsGb(resource.toString + "b").toString + "GB" + ByteTimeUtils.negativeByteStringAsGb(resource.toString + "b").toString + "GB" } { case e: Exception => logger.error(s"Cannot convert ${resource} to Gb, " + e.getMessage) resource.toString + unitType @@ -241,7 +245,13 @@ object RMUtils extends Logging { val maxMsg = if (null == maxResource) "null" + unitType else dealMemory(resourceType, unitType, maxResource.toString) - s" user ${resourceType}, requestResource : ${reqMsg} > availableResource : ${availMsg}, maxResource : ${maxMsg}." 
+ if (StringUtils.isEmpty(queueName)) { + s" use ${resourceType}, requestResource : ${reqMsg} > availableResource : ${availMsg}, maxResource : ${maxMsg}." + } else { + s" use ${resourceType}, requestResource : ${reqMsg} > availableResource : ${availMsg}, maxResource : ${maxMsg}, queueName : ${queueName}." + } } + def getECTicketID: String = UUID.randomUUID().toString + } diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/java/org/apache/linkis/manager/am/util/ECResourceInfoUtilsTest.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/java/org/apache/linkis/manager/am/util/ECResourceInfoUtilsTest.java new file mode 100644 index 0000000000..1d1ce10cfd --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/java/org/apache/linkis/manager/am/util/ECResourceInfoUtilsTest.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.manager.am.util; + +import org.apache.linkis.common.utils.ByteTimeUtils; +import org.apache.linkis.manager.am.vo.ResourceVo; +import org.apache.linkis.manager.common.entity.persistence.ECResourceInfoRecord; +import org.apache.linkis.server.BDPJettyServerHelper; + +import java.util.HashMap; +import java.util.Map; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +/** ECResourceInfoUtilsTest */ +public class ECResourceInfoUtilsTest { + + @Test + public void testGetStringToMap() throws Exception { + ECResourceInfoRecord info = new ECResourceInfoRecord(); + info.setLabelValue("hadoop-LINKISCLI,spark-2.4.3"); + String str = + "{\"driver\":{\"instance\":1,\"memory\":\"3.0 GB\",\"cpu\":1}, \"yarn\":{\"queueName\":\"dws\",\"queueMemory\":\"2.0 GB\", \"queueCpu\":2, \"instance\":0}} "; + Map map = BDPJettyServerHelper.gson().fromJson(str, new HashMap<>().getClass()); + ResourceVo resourceVO = ECResourceInfoUtils.getStringToMap(str, info); + Map diverMap = (Map) map.get("driver"); + Assertions.assertEquals( + resourceVO.getInstance(), ((Double) diverMap.get("instance")).intValue()); + Assertions.assertEquals(resourceVO.getInstance(), 1); + Assertions.assertEquals(resourceVO.getCores(), ((Double) diverMap.get("cpu")).intValue()); + Assertions.assertEquals(resourceVO.getCores(), 1); + Assertions.assertEquals( + resourceVO.getMemory(), + ByteTimeUtils.byteStringAsBytes(String.valueOf(diverMap.getOrDefault("memory", "0k")))); + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/scala/org/apache/linkis/manager/rm/utils/RMUtilsTest.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/scala/org/apache/linkis/manager/rm/utils/RMUtilsTest.scala new file mode 100644 index 0000000000..35039a394f --- /dev/null +++ 
b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/test/scala/org/apache/linkis/manager/rm/utils/RMUtilsTest.scala @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.rm.utils + +import org.junit.jupiter.api.Test + +class RMUtilsTest { + + @Test def getResourceInfoMsg(): Unit = { + val resourceType: String = "Memory" + val unitType: String = "bytes" + val requestResource: Any = "644245094400" // 600G + val availableResource: Any = "-2147483648" // -2G + val maxResource: Any = "20454781747200" // 19050G + val result = RMUtils.getResourceInfoMsg( + resourceType, + unitType, + requestResource, + availableResource, + maxResource + ) + assert( + " use Memory, requestResource : 600GB > availableResource : -2GB, maxResource : 19050GB." 
+ .equals(result) + ) + } + +} diff --git a/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/constant/LabelKeyConstant.java b/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/constant/LabelKeyConstant.java index bcb14a7045..8358e43ce9 100644 --- a/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/constant/LabelKeyConstant.java +++ b/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/constant/LabelKeyConstant.java @@ -63,4 +63,8 @@ public class LabelKeyConstant { public static final String TENANT_KEY = "tenant"; public static final String FIXED_EC_KEY = "fixedEngineConn"; + + public static final String TEMPLATE_CONF_KEY = "ec.conf.templateId"; + + public static final String MANAGER_KEY = "manager"; } diff --git a/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/entity/cluster/ClusterLabel.java b/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/entity/cluster/ClusterLabel.java index c0fa569594..499adb496b 100644 --- a/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/entity/cluster/ClusterLabel.java +++ b/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/entity/cluster/ClusterLabel.java @@ -18,13 +18,18 @@ package org.apache.linkis.manager.label.entity.cluster; import org.apache.linkis.manager.label.constant.LabelKeyConstant; -import org.apache.linkis.manager.label.entity.Feature; -import org.apache.linkis.manager.label.entity.GenericLabel; +import org.apache.linkis.manager.label.entity.*; import org.apache.linkis.manager.label.entity.annon.ValueSerialNum; +import 
org.apache.linkis.manager.label.exception.LabelErrorException; + +import org.apache.commons.lang3.StringUtils; import java.util.HashMap; -public class ClusterLabel extends GenericLabel { +import static org.apache.linkis.manager.label.errorcode.LabelCommonErrorCodeSummary.LABEL_ERROR_CODE; + +public class ClusterLabel extends GenericLabel + implements EMNodeLabel, EngineNodeLabel, UserModifiable { public ClusterLabel() { setLabelKey(LabelKeyConstant.YARN_CLUSTER_KEY); @@ -64,4 +69,14 @@ public String getClusterType() { } return null; } + + @Override + public void valueCheck(String stringValue) throws LabelErrorException { + if (!StringUtils.isEmpty(stringValue)) { + if (stringValue.split(SerializableLabel.VALUE_SEPARATOR).length != 2) { + throw new LabelErrorException( + LABEL_ERROR_CODE.getErrorCode(), LABEL_ERROR_CODE.getErrorDesc()); + } + } + } } diff --git a/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/entity/engine/ManagerLabel.java b/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/entity/engine/ManagerLabel.java new file mode 100644 index 0000000000..674cc605af --- /dev/null +++ b/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/entity/engine/ManagerLabel.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.manager.label.entity.engine; + +import org.apache.linkis.manager.label.constant.LabelKeyConstant; +import org.apache.linkis.manager.label.entity.EngineNodeLabel; +import org.apache.linkis.manager.label.entity.Feature; +import org.apache.linkis.manager.label.entity.GenericLabel; +import org.apache.linkis.manager.label.entity.annon.ValueSerialNum; + +import java.util.HashMap; + +public class ManagerLabel extends GenericLabel implements EngineNodeLabel { + + public ManagerLabel() { + setLabelKey(LabelKeyConstant.MANAGER_KEY); + } + + @Override + public Feature getFeature() { + return Feature.CORE; + } + + public String getManager() { + if (null == getValue()) { + return null; + } + return getValue().get(getLabelKey()); + } + + @ValueSerialNum(0) + public void setManager(String manager) { + if (null == getValue()) { + setValue(new HashMap<>()); + } + getValue().put(getLabelKey(), manager); + } +} diff --git a/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/entity/engine/RunType.scala b/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/entity/engine/RunType.scala index f8ba133f32..7ef0ce0a5e 100644 --- a/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/entity/engine/RunType.scala +++ b/linkis-computation-governance/linkis-manager/linkis-label-common/src/main/java/org/apache/linkis/manager/label/entity/engine/RunType.scala @@ -42,6 +42,7 @@ 
object RunType extends Enumeration { val ES_JSON = Value("esjson") val TRINO_SQL = Value("tsql") + val JSON = Value("json") val SEATUNNEL_FLINK_SQL = Value("sfsql") diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/constant/AMConstant.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/constant/AMConstant.java index 081e5e605e..09d802a951 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/constant/AMConstant.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/constant/AMConstant.java @@ -34,4 +34,30 @@ public class AMConstant { public static final String START_REASON = "start_reason"; public static final String EC_CAN_RETRY = "ec_can_try"; + + public static final String EC_ASYNC_START_ID_KEY = "ecAsyncStartId"; + + public static final String EC_ASYNC_START_MANAGER_INSTANCE_KEY = "managerInstance"; + + /* + result : starting,success,failed + */ + public static final String EC_ASYNC_START_RESULT_KEY = "ecAsyncStartResult"; + + /* + default false + */ + public static final String EC_SYNC_START_KEY = "ecSyncStart"; + + public static final String EC_ASYNC_START_RESULT_SUCCESS = "success"; + + public static final String EC_ASYNC_START_RESULT_FAIL = "failed"; + + public static final String EC_ASYNC_START_RESULT_STARTING = "starting"; + + public static final String EC_ASYNC_START_FAIL_RETRY_KEY = "canRetry"; + + public static final String EC_ASYNC_START_FAIL_MSG_KEY = "failMsg"; + + public static final String EC_METRICS_KEY = "ecMetrics"; } diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/AMEMNode.java 
b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/AMEMNode.java index 132bc32bbd..34ba15601c 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/AMEMNode.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/AMEMNode.java @@ -43,6 +43,9 @@ public class AMEMNode implements EMNode, ScoreServiceInstance { private String owner; private String mark; + private String identifier; + + private String ticketId; private NodeTaskInfo nodeTaskInfo; @@ -139,6 +142,16 @@ public void setMark(String mark) { this.mark = mark; } + @Override + public String getIdentifier() { + return identifier; + } + + @Override + public void setIdentifier(String identifier) { + this.identifier = identifier; + } + @Override public NodeResource getNodeResource() { return nodeResource; @@ -179,6 +192,16 @@ public void setNodeHealthyInfo(NodeHealthyInfo nodeHealthyInfo) { this.nodeHealthyInfo = nodeHealthyInfo; } + @Override + public String getTicketId() { + return ticketId; + } + + @Override + public void setTicketId(String ticketId) { + this.ticketId = ticketId; + } + @Override public String toString() { return "AMEMNode{" diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/AMEngineNode.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/AMEngineNode.java index 40107aaba8..4e9ff861d0 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/AMEngineNode.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/AMEngineNode.java @@ -48,6 
+48,7 @@ public class AMEngineNode implements EngineNode, ScoreServiceInstance { private String owner; private String mark; + private String identifier; private NodeTaskInfo nodeTaskInfo; @@ -61,6 +62,8 @@ public class AMEngineNode implements EngineNode, ScoreServiceInstance { private String ticketId; + private String ecMetrics; + public AMEngineNode() {} public AMEngineNode(double score, ServiceInstance serviceInstance) { @@ -129,6 +132,16 @@ public void setMark(String mark) { this.mark = mark; } + @Override + public String getIdentifier() { + return identifier; + } + + @Override + public void setIdentifier(String identifier) { + this.identifier = identifier; + } + @Override public EMNode getEMNode() { return this.emNode; @@ -199,6 +212,16 @@ public void setTicketId(String ticketId) { this.ticketId = ticketId; } + @Override + public String getEcMetrics() { + return ecMetrics; + } + + @Override + public void setEcMetrics(String metrics) { + this.ecMetrics = metrics; + } + @Override public Date getUpdateTime() { return updateTime; diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/EngineNode.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/EngineNode.java index 3a77112850..627b41bc55 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/EngineNode.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/EngineNode.java @@ -30,4 +30,8 @@ public interface EngineNode extends AMNode, RMNode, LabelNode { String getTicketId(); void setTicketId(String ticketId); + + String getEcMetrics(); + + void setEcMetrics(String metrics); } diff --git 
a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/InfoRMNode.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/InfoRMNode.java index c6a329e95c..c9b54bed42 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/InfoRMNode.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/InfoRMNode.java @@ -32,6 +32,7 @@ public class InfoRMNode implements RMNode { private String owner; private String mark; + private String identifier; private NodeStatus nodeStatus; @@ -39,6 +40,8 @@ public class InfoRMNode implements RMNode { private Date updateTime; + private String ticketId; + @Override public NodeResource getNodeResource() { return nodeResource; @@ -79,6 +82,16 @@ public String getMark() { return mark; } + @Override + public String getIdentifier() { + return identifier; + } + + @Override + public void setIdentifier(String identifier) { + this.identifier = identifier; + } + @Override public Date getUpdateTime() { return updateTime; @@ -98,4 +111,14 @@ public Date getStartTime() { public void setStartTime(Date startTime) { this.startTime = startTime; } + + @Override + public String getTicketId() { + return ticketId; + } + + @Override + public void setTicketId(String ticketId) { + this.ticketId = ticketId; + } } diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/Node.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/Node.java index 135ff76db5..a89c1552cc 100644 --- 
a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/Node.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/node/Node.java @@ -44,4 +44,12 @@ public interface Node extends RequestProtocol { Date getStartTime(); void setStartTime(Date startTime); + + String getIdentifier(); + + void setIdentifier(String identifier); + + String getTicketId(); + + void setTicketId(String ticketId); } diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/ECResourceInfoRecord.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/ECResourceInfoRecord.java index 9d7c08158d..165606a090 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/ECResourceInfoRecord.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/ECResourceInfoRecord.java @@ -20,6 +20,8 @@ import org.apache.linkis.manager.common.entity.resource.Resource; import org.apache.linkis.manager.common.utils.ResourceUtils; +import org.apache.commons.lang3.StringUtils; + import java.util.Date; public class ECResourceInfoRecord { @@ -87,6 +89,20 @@ public String getLabelValue() { return labelValue; } + /** + * label value is userCreator and engineTypeLabel,engineType is the second eg + * "hadoop-IDE,spark-2.4.3" + * + * @return + */ + public String getEngineType() { + if (StringUtils.isNotBlank(labelValue)) { + return labelValue.split(",")[1].split("-")[0]; + } else { + return ""; + } + } + public void setLabelValue(String labelValue) { this.labelValue = labelValue; } diff --git 
a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceNode.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceNode.java index 006bcf06ff..770a2e528a 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceNode.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceNode.java @@ -26,6 +26,10 @@ public class PersistenceNode { private String owner; private String mark; + /** identifier if mark equals "process", then identifier equals pid */ + private String identifier; + + private String ticketId; private Date updateTime; private Date createTime; @@ -40,6 +44,22 @@ public void setMark(String mark) { this.mark = mark; } + public String getIdentifier() { + return identifier; + } + + public void setIdentifier(String identifier) { + this.identifier = identifier; + } + + public String getTicketId() { + return ticketId; + } + + public void setTicketId(String ticketId) { + this.ticketId = ticketId; + } + public Integer getId() { return id; } diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceNodeEntity.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceNodeEntity.java index 3eefbc75cc..0a2452551a 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceNodeEntity.java +++ 
b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceNodeEntity.java @@ -28,12 +28,15 @@ public class PersistenceNodeEntity implements Node { private ServiceInstance serviceInstance; private String owner; private String mark; + private String identifier; private NodeStatus nodeStatus; private Date startTime; private Date updateTime; + private String ticketId; + @Override public Date getUpdateTime() { return updateTime; @@ -88,6 +91,26 @@ public String getMark() { return this.mark; } + @Override + public String getIdentifier() { + return identifier; + } + + @Override + public void setIdentifier(String identifier) { + this.identifier = identifier; + } + + @Override + public String getTicketId() { + return ticketId; + } + + @Override + public void setTicketId(String ticketId) { + this.ticketId = ticketId; + } + public void setOwner(String owner) { this.owner = owner; } diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceNodeMetrics.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceNodeMetrics.java index 01ecc24fc9..12ddf17b28 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceNodeMetrics.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistenceNodeMetrics.java @@ -21,6 +21,7 @@ import org.apache.linkis.manager.common.entity.metrics.NodeMetrics; import java.util.Date; +import java.util.Objects; public class PersistenceNodeMetrics implements NodeMetrics { @@ -59,7 +60,9 @@ public Integer getStatus() { } public void setStatus(Integer status) { - this.status = status; + if 
(Objects.nonNull(status)) { + this.status = status; + } } @Override diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistencerEcNodeInfo.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistencerEcNodeInfo.java index 11665fb851..2578433052 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistencerEcNodeInfo.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/persistence/PersistencerEcNodeInfo.java @@ -24,6 +24,8 @@ public class PersistencerEcNodeInfo extends PersistenceNode { private String engineType; + private String heartbeatMsg; + public Integer getInstanceStatus() { return instanceStatus; } @@ -40,6 +42,14 @@ public void setEngineType(String engineType) { this.engineType = engineType; } + public String getHeartbeatMsg() { + return heartbeatMsg; + } + + public void setHeartbeatMsg(String heartbeatMsg) { + this.heartbeatMsg = heartbeatMsg; + } + @Override public String toString() { return "PersistencerEcNodeInfo{" @@ -48,7 +58,10 @@ public String toString() { + ", engineType='" + engineType + '\'' - + "} " + + ", heartbeatMsg='" + + heartbeatMsg + + '\'' + + '}' + super.toString(); } } diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/resource/CommonNodeResource.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/resource/CommonNodeResource.java index 76c5a8213e..44a81e783d 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/resource/CommonNodeResource.java 
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/resource/CommonNodeResource.java @@ -41,6 +41,12 @@ public class CommonNodeResource implements NodeResource { private Date updateTime; + private Integer maxApps; + + private Integer numPendingApps; + + private Integer numActiveApps; + public static NodeResource initNodeResource(ResourceType resourceType) { CommonNodeResource commonNodeResource = new CommonNodeResource(); commonNodeResource.setResourceType(resourceType); @@ -158,6 +164,30 @@ public void setLeftResource(Resource leftResource) { this.leftResource = leftResource; } + public Integer getMaxApps() { + return maxApps; + } + + public void setMaxApps(Integer maxApps) { + this.maxApps = maxApps; + } + + public Integer getNumPendingApps() { + return numPendingApps; + } + + public void setNumPendingApps(Integer numPendingApps) { + this.numPendingApps = numPendingApps; + } + + public Integer getNumActiveApps() { + return numActiveApps; + } + + public void setNumActiveApps(Integer numActiveApps) { + this.numActiveApps = numActiveApps; + } + @Override public String toString() { return "CommonNodeResource{" diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/resource/NodeResource.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/resource/NodeResource.java index 6f6290e550..e701ef5302 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/resource/NodeResource.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/entity/resource/NodeResource.java @@ -63,4 +63,16 @@ public interface NodeResource extends Serializable, RequestProtocol { void setLeftResource(Resource leftResource); Resource 
getLeftResource(); + + void setMaxApps(Integer maxApps); + + Integer getMaxApps(); + + void setNumPendingApps(Integer numPendingApps); + + Integer getNumPendingApps(); + + void setNumActiveApps(Integer numActiveApps); + + Integer getNumActiveApps(); } diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineStopRequest.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineStopRequest.java index 90515c48bc..ce14d09c46 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineStopRequest.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/protocol/engine/EngineStopRequest.java @@ -23,8 +23,13 @@ public class EngineStopRequest implements EngineRequest, RequestMethod { private ServiceInstance serviceInstance; - + private String logDirSuffix; + private String engineType; private String user; + /** identifierType, Reserved for ec containerized startup scenarios */ + private String identifierType; + /** identifier */ + private String identifier; public EngineStopRequest() {} @@ -41,6 +46,38 @@ public void setServiceInstance(ServiceInstance serviceInstance) { this.serviceInstance = serviceInstance; } + public String getLogDirSuffix() { + return logDirSuffix; + } + + public void setLogDirSuffix(String logDirSuffix) { + this.logDirSuffix = logDirSuffix; + } + + public String getEngineType() { + return engineType; + } + + public void setEngineType(String engineType) { + this.engineType = engineType; + } + + public String getIdentifierType() { + return identifierType; + } + + public void setIdentifierType(String identifierType) { + this.identifierType = identifierType; + } + + public String getIdentifier() { + return identifier; + } + + 
public void setIdentifier(String identifier) { + this.identifier = identifier; + } + public void setUser(String user) { this.user = user; } @@ -60,9 +97,21 @@ public String toString() { return "EngineStopRequest{" + "serviceInstance=" + serviceInstance + + ", logDirSuffix='" + + logDirSuffix + + '\'' + + ", engineType='" + + engineType + + '\'' + ", user='" + user + '\'' + + ", identifierType='" + + identifierType + + '\'' + + ", identifier='" + + identifier + + '\'' + '}'; } } diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/utils/ManagerUtils.java b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/utils/ManagerUtils.java index 53137e4a8c..10f4b99efa 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/utils/ManagerUtils.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/java/org/apache/linkis/manager/common/utils/ManagerUtils.java @@ -17,6 +17,7 @@ package org.apache.linkis.manager.common.utils; +import org.apache.linkis.common.utils.Utils; import org.apache.linkis.manager.common.conf.ManagerCommonConf; import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactory; import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext; @@ -44,7 +45,7 @@ public static String getAdminUser() { if (StringUtils.isNotBlank(ManagerCommonConf.DEFAULT_ADMIN().getValue())) { return ManagerCommonConf.DEFAULT_ADMIN().getValue(); } - return System.getProperty("user.name"); + return Utils.getJvmUser(); } public static Label persistenceLabelToRealLabel(Label label) { diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/entity/resource/Resource.scala 
b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/entity/resource/Resource.scala index 47b6515849..0cfb4ae055 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/entity/resource/Resource.scala +++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/entity/resource/Resource.scala @@ -436,14 +436,8 @@ class DriverAndYarnResource( } def isModuleOperate(r: Resource): Boolean = { - if (this.isModuleOperate || r.isModuleOperate) { - true - } else if (this.yarnResource.queueName.equals(r.yarnResource.queueName)) { - logger.debug(s"Not module operate this:$this other:$r") - false - } else { - true - } + // todo consider optimization + false } def isModuleOperate: Boolean = { diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/resource/ResourceProtocol.scala b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/resource/ResourceProtocol.scala index 20fa958e7d..c22f8824c2 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/resource/ResourceProtocol.scala +++ b/linkis-computation-governance/linkis-manager/linkis-manager-common/src/main/scala/org/apache/linkis/manager/common/protocol/resource/ResourceProtocol.scala @@ -45,4 +45,13 @@ case class ResponseTaskRunningInfo( resourceMap: util.HashMap[String, ResourceWithStatus], extraInfoMap: util.HashMap[String, Object] ) extends RetryableProtocol - with RequestProtocol + with RequestProtocol { + + private val mutableResourceMap = Option(resourceMap).getOrElse(new util.HashMap) + private val mutableExtraInfoMap = Option(extraInfoMap).getOrElse(new util.HashMap) + + def getResourceMaps: 
util.HashMap[String, ResourceWithStatus] = mutableResourceMap + + def getExtraInfoMap: util.HashMap[String, Object] = mutableExtraInfoMap + +} diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/NodeManagerMapper.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/NodeManagerMapper.java index 7275561a8e..92d3032207 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/NodeManagerMapper.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/dao/NodeManagerMapper.java @@ -41,12 +41,10 @@ void updateNodeInstance( List getAllNodes(); - void updateNodeInstanceOverload(@Param("persistenceNode") PersistenceNode persistenceNode); + void updateNodeInstanceByInstance(@Param("persistenceNode") PersistenceNode persistenceNode); Integer getNodeInstanceId(@Param("instance") String instance); - Integer getIdByInstance(@Param("instance") String instance); - List getNodeInstanceIds(@Param("instances") List instances); PersistenceNode getNodeInstance(@Param("instance") String instance); @@ -77,5 +75,7 @@ void updateNodeLabelRelation( List getNodeInstancesByOwnerList(@Param("owner") List owner); List getEMNodeInfoList( - @Param("creatorUsers") List creatorUsers, @Param("statuss") List statuss); + @Param("creatorUsers") List creatorUsers, + @Param("statuss") List statuss, + @Param("ecInstancesList") List ecInstancesList); } diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/NodeMetricManagerPersistence.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/NodeMetricManagerPersistence.java index afbf48b9f7..1a0887df06 100644 --- 
a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/NodeMetricManagerPersistence.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/NodeMetricManagerPersistence.java @@ -39,7 +39,7 @@ public interface NodeMetricManagerPersistence { * @param nodeMetrics * @throws PersistenceErrorException */ - void addOrupdateNodeMetrics(NodeMetrics nodeMetrics) throws PersistenceErrorException; + void addOrupdateNodeMetrics(NodeMetrics nodeMetrics); /** * 获取多个节点的 metrics列表 diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/impl/DefaultNodeManagerPersistence.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/impl/DefaultNodeManagerPersistence.java index 86f202acee..061588ac65 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/impl/DefaultNodeManagerPersistence.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/impl/DefaultNodeManagerPersistence.java @@ -40,6 +40,7 @@ import java.util.ArrayList; import java.util.Date; import java.util.List; +import java.util.Objects; import java.util.stream.Collectors; import com.google.common.collect.Lists; @@ -77,6 +78,7 @@ public void addNodeInstance(Node node) throws PersistenceErrorException { persistenceNode.setName(node.getServiceInstance().getApplicationName()); persistenceNode.setOwner(node.getOwner()); persistenceNode.setMark(node.getMark()); + persistenceNode.setTicketId(node.getTicketId()); persistenceNode.setCreateTime(new Date()); persistenceNode.setUpdateTime(new Date()); persistenceNode.setCreator(node.getOwner()); @@ -155,6 +157,8 @@ public List 
getNodes(String owner) throws PersistenceErrorException { persistenceNodeEntity.setMark(persistenceNode.getMark()); persistenceNodeEntity.setOwner(persistenceNode.getOwner()); persistenceNodeEntity.setStartTime(persistenceNode.getCreateTime()); + persistenceNodeEntity.setIdentifier(persistenceNode.getIdentifier()); + persistenceNodeEntity.setTicketId(persistenceNode.getTicketId()); persistenceNodeEntitys.add(persistenceNodeEntity); } } @@ -173,6 +177,8 @@ public List getAllNodes() throws PersistenceErrorException { serviceInstance.setInstance(persistenceNode.getInstance()); persistenceNodeEntity.setServiceInstance(serviceInstance); persistenceNodeEntity.setMark(persistenceNode.getMark()); + persistenceNodeEntity.setIdentifier(persistenceNode.getIdentifier()); + persistenceNodeEntity.setTicketId(persistenceNode.getTicketId()); persistenceNodeEntity.setOwner(persistenceNode.getOwner()); persistenceNodeEntity.setStartTime(persistenceNode.getCreateTime()); persistenceNodeEntity.setUpdateTime(persistenceNode.getUpdateTime()); @@ -183,24 +189,23 @@ public List getAllNodes() throws PersistenceErrorException { } @Override - public void updateNodeInstance(Node node) throws PersistenceErrorException { + public void updateNodeInstance(Node node) { - if (null != node) { + if (Objects.nonNull(node)) { PersistenceNode persistenceNode = new PersistenceNode(); persistenceNode.setInstance(node.getServiceInstance().getInstance()); persistenceNode.setName(node.getServiceInstance().getApplicationName()); - persistenceNode.setOwner(node.getOwner()); persistenceNode.setMark(node.getMark()); - persistenceNode.setCreateTime(new Date()); persistenceNode.setUpdateTime(new Date()); persistenceNode.setCreator(node.getOwner()); persistenceNode.setUpdator(node.getOwner()); - nodeManagerMapper.updateNodeInstanceOverload(persistenceNode); + persistenceNode.setIdentifier(node.getIdentifier()); + nodeManagerMapper.updateNodeInstanceByInstance(persistenceNode); } } @Override - public Node 
getNode(ServiceInstance serviceInstance) throws PersistenceErrorException { + public Node getNode(ServiceInstance serviceInstance) { String instance = serviceInstance.getInstance(); PersistenceNode nodeInstances = nodeManagerMapper.getNodeInstance(instance); if (null == nodeInstances) { @@ -210,6 +215,8 @@ public Node getNode(ServiceInstance serviceInstance) throws PersistenceErrorExce persistenceNodeEntity.setServiceInstance(serviceInstance); persistenceNodeEntity.setOwner(nodeInstances.getOwner()); persistenceNodeEntity.setMark(nodeInstances.getMark()); + persistenceNodeEntity.setIdentifier(nodeInstances.getIdentifier()); + persistenceNodeEntity.setTicketId(nodeInstances.getTicketId()); persistenceNodeEntity.setStartTime(nodeInstances.getCreateTime()); return persistenceNodeEntity; } @@ -218,7 +225,7 @@ public Node getNode(ServiceInstance serviceInstance) throws PersistenceErrorExce public void addEngineNode(EngineNode engineNode) throws PersistenceErrorException { // insert engine(插入engine) addNodeInstance(engineNode); - // insert relationship,(插入关联关系,)todo 异常后续统一处理 + // insert relationship,(插入关联关系,) String engineNodeInstance = engineNode.getServiceInstance().getInstance(); if (null == engineNode.getEMNode()) { throw new PersistenceErrorException( @@ -256,6 +263,8 @@ public EngineNode getEngineNode(ServiceInstance serviceInstance) } amEngineNode.setOwner(engineNode.getOwner()); amEngineNode.setMark(engineNode.getMark()); + amEngineNode.setIdentifier(engineNode.getIdentifier()); + amEngineNode.setTicketId(engineNode.getTicketId()); amEngineNode.setStartTime(engineNode.getCreateTime()); PersistenceNode emNode = nodeManagerMapper.getEMNodeInstanceByEngineNode(serviceInstance.getInstance()); @@ -300,6 +309,8 @@ public List getEngineNodeByEM(ServiceInstance serviceInstance) amEngineNode.setServiceInstance(engineServiceInstance); amEngineNode.setOwner(engineNode.getOwner()); amEngineNode.setMark(engineNode.getMark()); + 
amEngineNode.setIdentifier(engineNode.getIdentifier()); + amEngineNode.setTicketId(engineNode.getTicketId()); amEngineNode.setStartTime(engineNode.getCreateTime()); amEngineNode.setEMNode(amEmNode); @@ -340,6 +351,8 @@ public List getEngineNodeByServiceInstance(List ser amEngineNode.setServiceInstance(serviceInstance); amEngineNode.setOwner(engineNode.getOwner()); amEngineNode.setMark(engineNode.getMark()); + amEngineNode.setIdentifier(engineNode.getIdentifier()); + amEngineNode.setTicketId(engineNode.getTicketId()); amEngineNode.setStartTime(engineNode.getCreateTime()); amEngineNodeList.add(amEngineNode); }); @@ -351,19 +364,24 @@ public List getEngineNodeByServiceInstance(List ser @Override public List getNodesByOwnerList(List ownerlist) { - List nodeInstances = nodeManagerMapper.getNodeInstancesByOwnerList(ownerlist); List persistenceNodeEntitys = new ArrayList<>(); - if (!nodeInstances.isEmpty()) { - for (PersistenceNode persistenceNode : nodeInstances) { - PersistenceNodeEntity persistenceNodeEntity = new PersistenceNodeEntity(); - ServiceInstance serviceInstance = new ServiceInstance(); - serviceInstance.setApplicationName(persistenceNode.getName()); - serviceInstance.setInstance(persistenceNode.getInstance()); - persistenceNodeEntity.setServiceInstance(serviceInstance); - persistenceNodeEntity.setMark(persistenceNode.getMark()); - persistenceNodeEntity.setOwner(persistenceNode.getOwner()); - persistenceNodeEntity.setStartTime(persistenceNode.getCreateTime()); - persistenceNodeEntitys.add(persistenceNodeEntity); + if (CollectionUtils.isNotEmpty(ownerlist)) { + List nodeInstances = + nodeManagerMapper.getNodeInstancesByOwnerList(ownerlist); + if (CollectionUtils.isNotEmpty(nodeInstances)) { + for (PersistenceNode persistenceNode : nodeInstances) { + PersistenceNodeEntity persistenceNodeEntity = new PersistenceNodeEntity(); + ServiceInstance serviceInstance = new ServiceInstance(); + serviceInstance.setApplicationName(persistenceNode.getName()); + 
serviceInstance.setInstance(persistenceNode.getInstance()); + persistenceNodeEntity.setServiceInstance(serviceInstance); + persistenceNodeEntity.setMark(persistenceNode.getMark()); + persistenceNodeEntity.setOwner(persistenceNode.getOwner()); + persistenceNodeEntity.setStartTime(persistenceNode.getCreateTime()); + persistenceNodeEntity.setIdentifier(persistenceNode.getIdentifier()); + persistenceNodeEntity.setTicketId(persistenceNode.getTicketId()); + persistenceNodeEntitys.add(persistenceNodeEntity); + } } } return persistenceNodeEntitys; diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/impl/DefaultNodeMetricManagerPersistence.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/impl/DefaultNodeMetricManagerPersistence.java index dc677da193..67f21a7fb5 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/impl/DefaultNodeMetricManagerPersistence.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/org/apache/linkis/manager/persistence/impl/DefaultNodeMetricManagerPersistence.java @@ -18,6 +18,7 @@ package org.apache.linkis.manager.persistence.impl; import org.apache.linkis.common.ServiceInstance; +import org.apache.linkis.governance.common.conf.GovernanceCommonConf; import org.apache.linkis.manager.common.entity.enumeration.NodeStatus; import org.apache.linkis.manager.common.entity.metrics.NodeMetrics; import org.apache.linkis.manager.common.entity.node.Node; @@ -63,7 +64,6 @@ public void setNodeMetricManagerMapper(NodeMetricManagerMapper nodeMetricManager @Override public void addNodeMetrics(NodeMetrics nodeMetrics) throws PersistenceErrorException { - // 直接插入 NodeMetric即可 PersistenceNodeMetrics persistenceNodeMetrics = new PersistenceNodeMetrics(); 
persistenceNodeMetrics.setInstance(nodeMetrics.getServiceInstance().getInstance()); persistenceNodeMetrics.setHealthy(nodeMetrics.getHealthy()); @@ -72,19 +72,17 @@ public void addNodeMetrics(NodeMetrics nodeMetrics) throws PersistenceErrorExcep persistenceNodeMetrics.setStatus(nodeMetrics.getStatus()); persistenceNodeMetrics.setCreateTime(new Date()); persistenceNodeMetrics.setUpdateTime(new Date()); - // todo 异常信息后面统一处理 nodeMetricManagerMapper.addNodeMetrics(persistenceNodeMetrics); } @Override - public void addOrupdateNodeMetrics(NodeMetrics nodeMetrics) throws PersistenceErrorException { + public void addOrupdateNodeMetrics(NodeMetrics nodeMetrics) { if (null == nodeMetrics.getServiceInstance()) { logger.warn( "The request of update node metrics was ignored, because the node metrics service instance is null"); return; } String instance = nodeMetrics.getServiceInstance().getInstance(); - // todo 异常信息后面统一处理 PersistenceNode node = nodeManagerMapper.getNodeInstance(instance); if (node == null) { logger.warn( @@ -94,7 +92,6 @@ public void addOrupdateNodeMetrics(NodeMetrics nodeMetrics) throws PersistenceEr return; } int isInstanceIdExist = nodeMetricManagerMapper.checkInstanceExist(instance); - // 是否存在 PersistenceNodeMetrics persistenceNodeMetrics = new PersistenceNodeMetrics(); if (isInstanceIdExist == 0) { persistenceNodeMetrics.setInstance(nodeMetrics.getServiceInstance().getInstance()); @@ -104,18 +101,26 @@ public void addOrupdateNodeMetrics(NodeMetrics nodeMetrics) throws PersistenceEr persistenceNodeMetrics.setStatus(nodeMetrics.getStatus()); persistenceNodeMetrics.setCreateTime(new Date()); persistenceNodeMetrics.setUpdateTime(new Date()); - // todo 异常信息后面统一处理 nodeMetricManagerMapper.addNodeMetrics(persistenceNodeMetrics); } else if (isInstanceIdExist == 1) { // ec node metircs report ignore update Shutingdown node (for case: asyn stop engine) PersistenceNodeMetrics oldMetrics = nodeMetricManagerMapper.getNodeMetricsByInstance(instance); - if 
(NodeStatus.ShuttingDown.ordinal() == oldMetrics.getStatus()) { + + boolean isECM = + nodeMetrics + .getServiceInstance() + .getApplicationName() + .equalsIgnoreCase(GovernanceCommonConf.ENGINE_CONN_MANAGER_SPRING_NAME().getValue()); + if (!isECM + && oldMetrics != null + && NodeStatus.ShuttingDown.ordinal() <= oldMetrics.getStatus()) { logger.info( - "ignore update ShuttingDown status node:{} to status:{}", + "ignore update status node:{} from:{} to status:{}", instance, + NodeStatus.values()[oldMetrics.getStatus()].name(), NodeStatus.values()[nodeMetrics.getStatus()].name()); - persistenceNodeMetrics.setStatus(null); + persistenceNodeMetrics.setStatus(oldMetrics.getStatus()); } else { persistenceNodeMetrics.setStatus(nodeMetrics.getStatus()); } @@ -127,7 +132,6 @@ public void addOrupdateNodeMetrics(NodeMetrics nodeMetrics) throws PersistenceEr persistenceNodeMetrics.setUpdateTime(new Date()); nodeMetricManagerMapper.updateNodeMetrics(persistenceNodeMetrics, instance); } else { - // 其他情况都不处理,打印个告警日志 } } @@ -142,7 +146,6 @@ public List getNodeMetrics(List nodes) instances.add(instance); } - // 根据 id 查 metric 信息 List persistenceNodeMetricsList = nodeMetricManagerMapper.getNodeMetricsByInstances(instances); diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/common/ECResourceRecordMapper.xml b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/common/ECResourceRecordMapper.xml index 9c9eb2cc63..6806b7e8d4 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/common/ECResourceRecordMapper.xml +++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/common/ECResourceRecordMapper.xml @@ -83,25 +83,26 @@ \ No newline at end of file diff --git 
a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/common/NodeManagerMapper.xml b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/common/NodeManagerMapper.xml index 2294cdbca4..f0dab6eac1 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/common/NodeManagerMapper.xml +++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/resources/mapper/common/NodeManagerMapper.xml @@ -20,120 +20,202 @@ - update linkis_cg_manager_engine_em set engine_instance = #{instance} where engine_instance = #{tickedId} + UPDATE linkis_cg_manager_engine_em + SET engine_instance = #{instance} + WHERE engine_instance = #{tickedId} - update linkis_cg_manager_label_service_instance set service_instance = #{instance} where service_instance = #{tickedId} + UPDATE linkis_cg_manager_label_service_instance + SET service_instance = #{instance} + WHERE service_instance = #{tickedId} - insert into linkis_cg_manager_service_instance(instance,name,owner,mark,update_time,create_time,updator,creator) - values(#{instance},#{name},#{owner},#{mark},#{updateTime},#{createTime},#{updator},#{creator}) + INSERT INTO linkis_cg_manager_service_instance (instance, name, owner, mark, ticketId, update_time + , create_time, updator, creator) + VALUES (#{instance}, #{name}, #{owner}, #{mark}, #{ticketId}, #{updateTime} + , #{createTime}, #{updator}, #{creator}) - update linkis_cg_manager_service_instance set instance = #{persistenceNode.instance}, owner = #{persistenceNode.owner},mark = #{persistenceNode.mark},name = #{persistenceNode.name}, - update_time = #{persistenceNode.updateTime},updator = #{persistenceNode.updator},creator = #{persistenceNode.creator} where instance = #{instance} + UPDATE linkis_cg_manager_service_instance + + + instance = #{persistenceNode.instance}, + + + owner = #{persistenceNode.owner}, + + + mark = 
#{persistenceNode.mark}, + + + name = #{persistenceNode.name}, + + + update_time = #{persistenceNode.updateTime}, + + + updator = #{persistenceNode.updator}, + + + creator = #{persistenceNode.creator}, + + + identifier = #{persistenceNode.identifier}, + + + WHERE instance = #{instance} - delete from linkis_cg_manager_service_instance where instance = #{instance} + DELETE FROM linkis_cg_manager_service_instance + WHERE instance = #{instance} - - update linkis_cg_manager_service_instance set owner = #{persistenceNode.owner},mark = #{persistenceNode.mark},name = #{persistenceNode.name}, - update_time = #{persistenceNode.updateTime},create_time = #{persistenceNode.createTime},updator = #{persistenceNode.updator} - ,creator = #{persistenceNode.creator} where instance = #{persistenceNode.instance} + + UPDATE linkis_cg_manager_service_instance + + + mark = #{persistenceNode.mark}, + + + name = #{persistenceNode.name}, + + + update_time = #{persistenceNode.updateTime}, + + + updator = #{persistenceNode.updator}, + + + creator = #{persistenceNode.creator}, + + + identifier = #{persistenceNode.identifier}, + + + WHERE instance = #{persistenceNode.instance} - - - insert into linkis_cg_manager_engine_em (engine_instance, em_instance, update_time, create_time) - values(#{engineNodeInstance}, #{emNodeInstance}, now(), now()) + INSERT INTO linkis_cg_manager_engine_em (engine_instance, em_instance, update_time, create_time) + VALUES (#{engineNodeInstance}, #{emNodeInstance}, now(), now()) - delete from linkis_cg_manager_engine_em where engine_instance = #{engineNodeInstance} and em_instance = #{emNodeInstance} + DELETE FROM linkis_cg_manager_engine_em + WHERE engine_instance = #{engineNodeInstance} + AND em_instance = #{emNodeInstance} diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/NodeManagerMapperTest.java 
b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/NodeManagerMapperTest.java index 67b4bc354c..861b557fb3 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/NodeManagerMapperTest.java +++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/org/apache/linkis/manager/dao/NodeManagerMapperTest.java @@ -93,7 +93,7 @@ void updateNodeInstanceOverload() { persistenceNode.setMark("testmark3"); persistenceNode.setUpdator("testupdator3"); persistenceNode.setCreator("testcreator3"); - nodeManagerMapper.updateNodeInstanceOverload(persistenceNode); + nodeManagerMapper.updateNodeInstanceByInstance(persistenceNode); PersistenceNode persistenceNodes = nodeManagerMapper.getNodeInstance("instance2"); assertTrue(persistenceNode.getName().equals(persistenceNodes.getName())); } diff --git a/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml b/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml index 175f2cb7ad..6d26ae863a 100644 --- a/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml +++ b/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml @@ -673,6 +673,7 @@ data: `name` varchar(32) COLLATE utf8_bin DEFAULT NULL, `owner` varchar(32) COLLATE utf8_bin DEFAULT NULL, `mark` varchar(32) COLLATE utf8_bin DEFAULT NULL, + `identifier` varchar(32) COLLATE utf8_bin DEFAULT NULL, `update_time` datetime DEFAULT CURRENT_TIMESTAMP, `create_time` datetime DEFAULT CURRENT_TIMESTAMP, `updator` varchar(32) COLLATE utf8_bin DEFAULT NULL, @@ -1183,12 +1184,12 @@ data: (select `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = 
'*-*,*-*'); - -- spark2.4.3 default configuration + -- spark default configuration insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) (select `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = @SPARK_ALL); - -- hive1.2.1 default configuration + -- hive default configuration insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) (select `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = @HIVE_ALL); diff --git a/linkis-dist/package/conf/linkis-mg-gateway.properties b/linkis-dist/package/conf/linkis-mg-gateway.properties index 84be3d897d..6a4f1d94a6 100644 --- a/linkis-dist/package/conf/linkis-mg-gateway.properties +++ b/linkis-dist/package/conf/linkis-mg-gateway.properties @@ -28,7 +28,7 @@ wds.linkis.login_encrypt.enable=false ##LDAP wds.linkis.ldap.proxy.url= wds.linkis.ldap.proxy.baseDN= -wds.linkis.ldap.proxy.userNameFormat= +#wds.linkis.ldap.proxy.userNameFormat=cn=%s@xxx.com,OU=xxx,DC=xxx,DC=com wds.linkis.admin.user=hadoop #wds.linkis.admin.password= ##Spring diff --git a/linkis-dist/package/conf/log4j2.xml b/linkis-dist/package/conf/log4j2.xml index c10f78e6c7..aeda99788c 100644 --- a/linkis-dist/package/conf/log4j2.xml +++ b/linkis-dist/package/conf/log4j2.xml @@ -20,7 +20,7 @@ - + diff --git a/linkis-dist/package/conf/version.properties b/linkis-dist/package/conf/version.properties new file mode 100644 index 0000000000..0397f621ec --- /dev/null +++ 
b/linkis-dist/package/conf/version.properties @@ -0,0 +1,20 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# better to update this info when building linkis +#eureka metadata-map.linkis.app.version +version=Linkis-x.x.x-202306081009 +#build time information +build_time=2022-09-21 15:29.39 \ No newline at end of file diff --git a/linkis-dist/package/db/linkis_ddl.sql b/linkis-dist/package/db/linkis_ddl.sql index 8fe51ceef8..ef169ba0d7 100644 --- a/linkis-dist/package/db/linkis_ddl.sql +++ b/linkis-dist/package/db/linkis_ddl.sql @@ -37,7 +37,7 @@ CREATE TABLE `linkis_ps_configuration_config_key`( `default_value` varchar(200) DEFAULT NULL COMMENT 'Adopted when user does not set key', `validate_type` varchar(50) DEFAULT NULL COMMENT 'Validate type, one of the following: None, NumInterval, FloatInterval, Include, Regex, OPF, Custom Rules', `validate_range` varchar(50) DEFAULT NULL COMMENT 'Validate range', - `engine_conn_type` varchar(50) DEFAULT NULL COMMENT 'engine type,such as spark,hive etc', + `engine_conn_type` varchar(50) DEFAULT '' COMMENT 'engine type,such as spark,hive etc', `is_hidden` tinyint(1) DEFAULT NULL COMMENT 'Whether it is hidden from user.
If set to 1(true), then user cannot modify, however, it could still be used in back-end', `is_advanced` tinyint(1) DEFAULT NULL COMMENT 'Whether it is an advanced parameter. If set to 1(true), parameters would be displayed only when user choose to do so', `level` tinyint(1) DEFAULT NULL COMMENT 'Basis for displaying sorting in the front-end. Higher the level is, higher the rank the parameter gets', @@ -45,6 +45,7 @@ CREATE TABLE `linkis_ps_configuration_config_key`( `en_description` varchar(200) DEFAULT NULL COMMENT 'english description', `en_name` varchar(100) DEFAULT NULL COMMENT 'english name', `en_treeName` varchar(100) DEFAULT NULL COMMENT 'english treeName', + UNIQUE INDEX `uniq_key_ectype` (`key`,`engine_conn_type`), PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; @@ -153,6 +154,8 @@ DROP TABLE IF EXISTS `linkis_ps_udf_manager`; CREATE TABLE `linkis_ps_udf_manager` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `user_name` varchar(20) DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; @@ -166,6 +169,8 @@ CREATE TABLE `linkis_ps_udf_shared_group` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `udf_id` bigint(20) NOT NULL, `shared_group` varchar(50) NOT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; @@ -174,7 +179,9 @@ CREATE TABLE `linkis_ps_udf_shared_info` ( `id` bigint(20) PRIMARY KEY NOT NULL AUTO_INCREMENT, `udf_id` bigint(20) NOT NULL, - `user_name` varchar(50) NOT NULL + `user_name` varchar(50) NOT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- @@ -190,7 +197,8 @@ CREATE TABLE `linkis_ps_udf_tree` ( `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, 
`update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, `category` varchar(50) DEFAULT NULL COMMENT 'Used to distinguish between udf and function', - PRIMARY KEY (`id`) + PRIMARY KEY (`id`), + UNIQUE KEY `uniq_parent_name_uname_category` (`parent`,`name`,`user_name`,`category`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; @@ -203,7 +211,10 @@ CREATE TABLE `linkis_ps_udf_user_load` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `udf_id` bigint(20) NOT NULL, `user_name` varchar(50) NOT NULL, - PRIMARY KEY (`id`) + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `uniq_uid_uname` (`udf_id`, `user_name`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; DROP TABLE IF EXISTS `linkis_ps_udf_baseinfo`; @@ -235,6 +246,7 @@ CREATE TABLE `linkis_ps_udf_version` ( `use_format` varchar(255) DEFAULT NULL, `description` varchar(255) NOT NULL COMMENT 'version desc', `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, `md5` varchar(100) DEFAULT NULL, PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; @@ -486,6 +498,7 @@ CREATE TABLE if not exists `linkis_ps_bml_resources` ( `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Updated time', `updator` varchar(50) DEFAULT NULL COMMENT 'updator', `enable_flag` tinyint(1) NOT NULL DEFAULT '1' COMMENT 'Status, 1: normal, 0: frozen', + unique key `uniq_rid_eflag`(`resource_id`, `enable_flag`), PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=9 DEFAULT CHARSET=utf8mb4; @@ -670,6 +683,8 @@ CREATE TABLE `linkis_cg_manager_service_instance` ( `name` varchar(32) COLLATE utf8_bin DEFAULT NULL, `owner` varchar(32) COLLATE utf8_bin DEFAULT NULL, `mark` varchar(32) COLLATE utf8_bin DEFAULT NULL, + `identifier` varchar(32) COLLATE utf8_bin DEFAULT NULL, + `ticketId` varchar(255) COLLATE utf8_bin DEFAULT 
NULL, `update_time` datetime DEFAULT CURRENT_TIMESTAMP, `create_time` datetime DEFAULT CURRENT_TIMESTAMP, `updator` varchar(32) COLLATE utf8_bin DEFAULT NULL, diff --git a/linkis-dist/package/db/linkis_dml.sql b/linkis-dist/package/db/linkis_dml.sql index dffa9a7d76..b4dc1fa62c 100644 --- a/linkis-dist/package/db/linkis_dml.sql +++ b/linkis-dist/package/db/linkis_dml.sql @@ -380,8 +380,8 @@ INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01101','ECM资源不足,请联系管理员扩容','ECM resources are insufficient',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01102','ECM 内存资源不足,请联系管理员扩容','ECM memory resources are insufficient',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01103','ECM CPU资源不足,请联系管理员扩容','ECM CPU resources are insufficient',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01004','ECM 实例资源不足,请联系管理员扩容','ECM Insufficient number of instances',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01005','机器内存不足,请联系管理员扩容','Cannot allocate memory',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01104','ECM 实例资源不足,请联系管理员扩容','ECM Insufficient number of instances',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01105','机器内存不足,请联系管理员扩容','Cannot allocate memory',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('12001','队列CPU资源不足,可以调整Spark执行器个数','Queue CPU resources are insufficient',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('12002','队列内存资源不足,可以调整Spark执行器个数','Insufficient queue memory',0); diff --git a/linkis-dist/package/db/module/linkis_configuration.sql 
b/linkis-dist/package/db/module/linkis_configuration.sql index fefa6f9f99..57ba82d8d1 100644 --- a/linkis-dist/package/db/module/linkis_configuration.sql +++ b/linkis-dist/package/db/module/linkis_configuration.sql @@ -29,6 +29,9 @@ CREATE TABLE `linkis_ps_configuration_config_key`( `is_advanced` tinyint(1) DEFAULT NULL COMMENT 'Whether it is an advanced parameter. If set to 1(true), parameters would be displayed only when user choose to do so', `level` tinyint(1) DEFAULT NULL COMMENT 'Basis for displaying sorting in the front-end. Higher the level is, higher the rank the parameter gets', `treeName` varchar(20) DEFAULT NULL COMMENT 'Reserved field, representing the subdirectory of engineType', + `en_description` varchar(200) DEFAULT NULL COMMENT 'english description', + `en_name` varchar(100) DEFAULT NULL COMMENT 'english name', + `en_treeName` varchar(100) DEFAULT NULL COMMENT 'english treeName', PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; diff --git a/linkis-dist/package/db/module/linkis_manager.sql b/linkis-dist/package/db/module/linkis_manager.sql index 1a404af9bb..c3128633e5 100644 --- a/linkis-dist/package/db/module/linkis_manager.sql +++ b/linkis-dist/package/db/module/linkis_manager.sql @@ -23,6 +23,7 @@ CREATE TABLE `linkis_cg_manager_service_instance` ( `name` varchar(32) COLLATE utf8_bin DEFAULT NULL, `owner` varchar(32) COLLATE utf8_bin DEFAULT NULL, `mark` varchar(32) COLLATE utf8_bin DEFAULT NULL, + `identifier` varchar(32) COLLATE utf8_bin DEFAULT NULL, `update_time` datetime DEFAULT CURRENT_TIMESTAMP, `create_time` datetime DEFAULT CURRENT_TIMESTAMP, `updator` varchar(32) COLLATE utf8_bin DEFAULT NULL, diff --git a/linkis-dist/package/db/module/linkis_udf.sql b/linkis-dist/package/db/module/linkis_udf.sql index 999793b1cc..3e7b2c4f13 100644 --- a/linkis-dist/package/db/module/linkis_udf.sql +++ b/linkis-dist/package/db/module/linkis_udf.sql @@ -25,6 +25,8 @@ DROP TABLE IF EXISTS `linkis_ps_udf_manager`; CREATE TABLE 
`linkis_ps_udf_manager` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `user_name` varchar(20) DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; @@ -73,9 +75,12 @@ CREATE TABLE `linkis_ps_udf_tree` ( DROP TABLE IF EXISTS `linkis_ps_udf_user_load`; CREATE TABLE `linkis_ps_udf_user_load` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, - `udf_id` int(11) NOT NULL, + `udf_id` bigint(20) NOT NULL, `user_name` varchar(50) NOT NULL, - PRIMARY KEY (`id`) + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `uniq_uid_uname` (`udf_id`, `user_name`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; DROP TABLE IF EXISTS `linkis_ps_udf_baseinfo`; diff --git a/linkis-dist/package/db/udf/udf_sys.sql b/linkis-dist/package/db/udf/udf_sys.sql new file mode 100644 index 0000000000..903834596c --- /dev/null +++ b/linkis-dist/package/db/udf/udf_sys.sql @@ -0,0 +1,813 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +-- 字符串函数 +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","字符串函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="字符串函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","substring","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "substring"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","",""," Column substring(Column str, int pos, int len)","Returns the substring from string str before count occurrences of the delimiter delim.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","concat","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "concat"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string concat(STRING|BINARY a, STRING|BINARY b...)","Returns the string or bytes resulting from concatenating the strings or bytes passed in as parameters in order. For example, concat('foo', 'bar') results in 'foobar'. 
Note that this function can take any number of input strings.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","concat_ws","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "concat_ws"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string concat_ws(string SEP, array)","Like concat(), but with custom separator SEP.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","decode","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "decode"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string decode(binary bin, string charset)","Decodes the first argument into a String using the provided character set (one of 'US-ASCII', 'ISO-8859-1', 'UTF-8', 'UTF-16BE', 'UTF-16LE', 'UTF-16'). If either argument is null, the result will also be null. 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","elt","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "elt"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string elt(N int,str1 string,str2 string,str3 string,...)","Return string at index number. For example elt(2,'hello','world') returns 'world'.?Returns NULL if N is less than 1 or greater than the number of arguments.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","format_number","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "format_number"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string format_number(number x, int d)","Formats the number X to a format like '#,###,###.##', rounded to D decimal places, and returns the result as a string. 
If D is 0, the result has no decimal point or fractional part.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","get_json_object","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "get_json_object"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string get_json_object(string json_string, string path)","Extracts json object from a json string based on json path specified, and returns json string of the extracted json object. It will return null if the input json string is invalid.?NOTE: The json path can only have the characters [0-9a-z_], i.e., no upper",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","lower","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "lower"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string lower(string A) lcase(string A)","Returns the string resulting from converting all characters of B to lower case. 
For example, lower('fOoBaR') results in 'foobar'.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","lcase","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "lcase"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string lcase(string A)","Returns the string resulting from converting all characters of B to lower case. For example, lower('fOoBaR') results in 'foobar'.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","lpad","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "lpad"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string lpad(string str, int len, string pad)","Returns str, left-padded with pad to a length of len. If str is longer than len, the return value is shortened to len characters. 
In case of empty pad string, the return value is null.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ltrim","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ltrim"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string ltrim(string A)","Returns the string resulting from trimming spaces from the beginning(left hand side) of A. For example, ltrim(' foobar ') results in 'foobar '.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","parse_url","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "parse_url"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string parse_url(string urlString, string partToExtract [, string keyToExtract])","Returns the specified part from the URL. Valid values for partToExtract include HOST, PATH, QUERY, REF, PROTOCOL, AUTHORITY, FILE, and USERINFO. For example, parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'HOST') returns 'facebook.com'. 
Als",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","printf","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "printf"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string printf(String format, Obj... args)","Returns the input formatted according do printf-style format strings .",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regexp_extract","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regexp_extract"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string regexp_extract(string subject, string pattern, int index)","Returns the string extracted using the pattern. For example, regexp_extract('foothebar', 'foo(.*?)(bar)', 2) returns 'bar.' 
Note that some care is necessary in using predefined character classes: using 's' as the second argument will match the letter s; '",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regexp_replace","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regexp_replace"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string regexp_replace(string INITIAL_STRING, string PATTERN, string REPLACEMENT)","Returns the string resulting from replacing all substrings in INITIAL_STRING that match the java regular expression syntax defined in PATTERN with instances of REPLACEMENT. For example, regexp_replace(foobar, oo|ar, ) returns 'fb.' Note that some care is ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","repeat","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "repeat"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string repeat(string str, int n)","Repeats str n times.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","replace","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "replace"; +INSERT INTO linkis_ps_udf_version 
(udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string replace(string A, string OLD, string NEW)","Returns the string A with all non-overlapping?occurrences of OLD replaced with NEW . Example: select replace(ababab, abab, Z); returns Zab.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","reverse","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "reverse"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string reverse(string A)","Returns the reversed string.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","rpad","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "rpad"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string rpad(string str, int len, string pad)","Returns str, right-padded with pad to a length of len. If str is longer than len, the return value is shortened to len characters. 
In case of empty pad string, the return value is null.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","rtrim","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "rtrim"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string rtrim(string A)","Returns the string resulting from trimming spaces from the end(right hand side) of A. For example, rtrim(' foobar ') results in ' foobar'.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","space","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "space"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string space(int n)","Returns a string of n spaces.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","substr","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "substr"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string substr(STRING|BINARY A, INT start [, INT len])","Returns the substring or slice of the byte array of A starting 
from start position till the end of string A or with optional length len. For example, substr('foobar', 4) results in 'bar'",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","substring","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "substring"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string substring(STRING|BINARY a, INT start [, INT len])","Returns the substring or slice of the byte array of A starting from start position till the end of string A or with optional length len. For example, substr('foobar', 4) results in 'bar'",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","substring_index","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "substring_index"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string substring_index(string A, string delim, int count)","Returns the substring from string A before count occurrences of the delimiter delim. If count is positive, everything to the left of the final delimiter (counting from the left) is returned. 
If count is negative, everything to the right of the final delim",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","translate","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "translate"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string translate(string|char|varchar input, string|char|varchar from, string|char|varchar to)","Translates the input string by replacing the characters present in the?from?string with the corresponding characters in the?to?string. This is similar to the?translatefunction in?PostgreSQL. If any of the parameters to this UDF are NULL, the result is NUL",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","trim","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "trim"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string trim(string A)","Returns the string resulting from trimming spaces from both ends of A. 
For example, trim(' foobar ') results in 'foobar'",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","upper","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "upper"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string upper(string A)","Returns the string resulting from converting all characters of A to upper case. For example, upper('fOoBaR') results in 'FOOBAR'.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ucase","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ucase"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string ucase(string A)","Returns the string resulting from converting all characters of A to upper case. 
For example, upper('fOoBaR') results in 'FOOBAR'.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","initcap","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "initcap"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string initcap(string A)","Returns string, with the first letter of each word in uppercase, all other letters in lowercase. Words are delimited by whitespace.?",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","soundex","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "soundex"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string soundex(string A)","Returns soundex code of the string. 
For example, soundex('Miller') results in M460.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","str_to_map","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "str_to_map"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","map str_to_map(text[, delimiter1, delimiter2])","Splits text into key-value pairs using two delimiters. Delimiter1 separates text into K-V pairs, and Delimiter2 splits each K-V pair. Default delimiters are ',' for delimiter1 and ':' for delimiter2.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ascii","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ascii"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int ascii(string str)","Returns the numeric value of the first character of str.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","character_length","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "character_length"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES 
(@ps_udf_baseinfo_id,"file://","","","","None","int character_length(string str)","Returns the number of UTF-8 characters contained in str . The function char_length is shorthand for this function.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","field","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "field"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int field(val T,val1 T,val2 T,val3 T,...)","Returns the index of val in the val1,val2,val3,... list or 0 if not found.?For example?field('world','say','hello','world') returns 3.All primitive types are supported, arguments are compared using str.equals(x). If val is NULL, the return value is 0.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","find_in_set","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "find_in_set"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int find_in_set(string str, string strList)","Returns the first occurance of str in strList where strList is a comma-delimited string. Returns null if either argument is null. Returns 0 if the first argument contains any commas. 
For example, find_in_set('ab', 'abc,b,ab,c,def') returns 3.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","instr","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "instr"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int instr(string str, string substr)","Returns the position of the first occurrence of?substr?in?str. Returns?null?if either of the arguments are?null?and returns?0?if?substr?could not be found in?str. Be aware that this is not zero based. The first character in?str?has index 1.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","length","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "length"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int length(string A)","Returns the length of the string.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","locate","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "locate"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES 
(@ps_udf_baseinfo_id,"file://","","","","None","int locate(string substr, string str[, int pos])","Returns the position of the first occurrence of substr in str after position pos.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","octet_length","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "octet_length"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int octet_length(string str)","Returns the number of octets required to hold the string str in UTF-8 encoding. Note that octet_length(str) can be larger than character_length(str).",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","levenshtein","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "levenshtein"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int levenshtein(string A, string B)","Returns the Levenshtein distance between two strings?. 
For example, levenshtein('kitten', 'sitting') results in 3.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","in_file","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "in_file"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","boolean in_file(string str, string filename)","Returns true if the string str appears as an entire line in filename.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","encode","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "encode"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","binary encode(string src, string charset)","Encodes the first argument into a BINARY using the provided character set (one of 'US-ASCII', 'ISO-8859-1', 'UTF-8', 'UTF-16BE', 'UTF-16LE', 'UTF-16'). 
If either argument is null, the result will also be null.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","expr","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "expr"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column expr(String expr)","Parses the expression string into the column that it represents, similar to DataFrame.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","unbase64","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "unbase64"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","binary unbase64(string str)","Converts the argument from a base 64 string to BINARY. 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","context_ngrams","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "context_ngrams"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array> context_ngrams(array>, array, int K, int pf)","Returns the top-k contextual N-grams from a set of tokenized sentences, given a string of context. See?StatisticsAndDataMining?for more information.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","format_string","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "format_string"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column format_string(String format, scala.collection.Seq arguments)","Formats the arguments in printf-style and returns the result as a string column.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ngrams","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ngrams"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES 
(@ps_udf_baseinfo_id,"file://","","","","None","array> ngrams(array>, int N, int K, int pf)","Returns the top-k N-grams from a set of tokenized sentences, such as those returned by the sentences() UDAF. See?StatisticsAndDataMining?for more information.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sentences","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sentences"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array> sentences(string str, string lang, string locale)","Tokenizes a string of natural language text into words and sentences, where each sentence is broken at the appropriate sentence boundary and returned as an array of words. The 'lang' and 'locale' are optional arguments. 
For example, sentences('Hello there",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","split","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "split"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array split(string str, string pat)","Splits str around pat (pat is a regular expression).",now(),"",now()); +-- 数值函数 +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","数值函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="数值函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","abs","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "abs"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","","Column abs(Column e)","Computes the absolute value",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","randn","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "randn"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) 
VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column randn(long seed)","Generate a column with independent and identically distributed (i.i.d.) samples from the standard normal distribution.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","rint","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "rint"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column rint(Column e)","Returns the double value that is closest in value to the argument and is equal to a mathematical integer.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","signum","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "signum"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column signum(Column e)","Computes the signum of the given value.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sinh","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sinh"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES 
(@ps_udf_baseinfo_id,"file://","","","","None","Column sinh(String columnName)","Computes the hyperbolic sine of the given column.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","tanh","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "tanh"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column tanh(Column e)","Computes the hyperbolic tangent of the given value.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","toDegrees","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "toDegrees"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column toDegrees(Column e)","Use degrees. 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","toRadians","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "toRadians"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column toRadians(Column e)","Use radians.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","atan2","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "atan2"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column atan2(Column l, Column r)","Returns the angle theta from the conversion of rectangular coordinates (x, y) to polar coordinates (r, theta).",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","cosh","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "cosh"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column cosh(Column e)","Computes the hyperbolic cosine of the given value.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo 
(create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","expm1","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "expm1"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column expm1(Column e)","Computes the exponential of the given value minus one.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","round","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "round"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE round(DOUBLE a [, INT d])","Returns the rounded BIGINT value of a or a rounded to d decimal places.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","hypot","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "hypot"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," Column hypot(Column l, Column r)","Computes sqrt(a^2^ + b^2^) without intermediate overflow or underflow.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES 
("sys","bround","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "bround"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE bround(DOUBLE a [, INT decimals])","Returns the rounded BIGINT value of a using HALF_EVEN rounding mode with optional decimal places d.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","floor","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "floor"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","BIGINT floor(DOUBLE a)","Returns the maximum?BIGINT?value that is equal to or less than?a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ceil","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ceil"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","BIGINT ceil(DOUBLE a)","Returns the minimum BIGINT value that is equal to or greater than?a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ceiling","0",@ps_udf_tree_id,now(),now(),"IDE","all"); 
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ceiling"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","BIGINT ceiling(DOUBLE a)","Returns the minimum BIGINT value that is equal to or greater than?a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","rand","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "rand"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE rand([INT seed])","Returns a random number (that changes from row to row) that is distributed uniformly from 0 to 1. 
Specifying the seed will make sure the generated random number sequence is deterministic.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","log1p","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "log1p"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column log1p(Column e)","Computes the natural logarithm of the given value plus one.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","exp","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "exp"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE exp(TL a)","Returns?ea?where?e?is the base of the natural logarithm. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ln","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ln"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE ln(TL a)","Returns the natural logarithm of the argument?a. 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","log10","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "log10"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE log10(TL a)","Returns the base-10 logarithm of the argument?a. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","log2","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "log2"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE log2(DOUBLE|a)","Returns the base-2 logarithm of the argument?a. 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","log","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "log"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE log(TL base, DOUBLE |DECIMALa)","Returns the base-base?logarithm of the argument?a.?",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","pow","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "pow"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE pow(DOUBLE a, DOUBLE p)","Returns?ap.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","power","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "power"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE power(DOUBLE a, DOUBLE p)","Returns?ap.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sqrt","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select 
@ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sqrt"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE sqrt(DOUBLE a), sqrt(DECIMAL a)","Returns the square root of a. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","bin","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "bin"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","STRING bin(BIGINT a)","Returns the number in binary format.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","hex","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "hex"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","STRING hex(BIGINT a) hex(STRING a) hex(BINARY a)","If the argument is an INT or binary, hex returns the number as a STRING in hexadecimal format. 
Otherwise if the number is a STRING, it converts each character into its hexadecimal representation and returns the resulting STRING.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","unhex","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "unhex"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","BINARY unhex(STRING a)","Inverse of hex. Interprets each pair of characters as a hexadecimal number and converts to the byte representation of the number. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","conv","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "conv"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","STRING conv(BIGINT num, INT from_base, INT to_base), conv(STRING num, INT from_base, INT to_base)","Converts a number from a given base to another.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","abs","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "abs"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES 
(@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE abs(DOUBLE a)","Returns the absolute value.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","pmod","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "pmod"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T pmod(T a, T b)","Returns the positive value of a mod b.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sin","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sin"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE sin(T a)","Returns the sine of a (a is in radians). ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","asin","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "asin"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE asin(T a)","Returns the arc sin of a if -1<=a<=1 or NULL otherwise. 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","cos","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "cos"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE cos(T a)","Returns the cosine of?a?(a?is in radians). ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","acos","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "acos"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE acos(T a)","Returns the arccosine of?a?if -1<=a<=1 or NULL otherwise. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","tan","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "tan"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE tan(T a)","Returns the tangent of?a?(a?is in radians). 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","atan","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "atan"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE atan(T a)","Returns the arctangent of?a. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","degrees","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "degrees"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE degrees(T a)","Converts value of?a?from radians to degrees. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","radians","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "radians"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE radians(T a)","Converts value of?a?from degrees to radians. 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","positive","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "positive"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T positive(T a)","Returns?a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","negate","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "negate"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","","Column negate(Column e)","Unary minus.negate the expression.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","negative","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "negative"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T negative(T a)","Returns?-a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sign","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from 
linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sign"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T sign(T a)","Returns the sign of a as '1.0' (if a is positive) or '-1.0' (if a is negative), '0.0' otherwise. The decimal version returns INT instead of DOUBLE. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","e","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "e"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE e()","Returns the value of e.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","pi","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "pi"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE pi()","Returns the value of pi.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","factorial","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "factorial"; +INSERT INTO linkis_ps_udf_version 
(udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","BIGINT factorial(INT a)","Returns the factorial of a. Valid a is [0..20].",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","cbrt","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "cbrt"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE cbrt(DOUBLE a)","Returns the cube root of a double value.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","shiftleft","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "shiftleft"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T shiftleft(T a, INT b)","Bitwise left shift. Shifts a b positions to the left. Returns int for tinyint, smallint and int a. 
Returns bigint for bigint a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","shiftright","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "shiftright"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T shiftright(T a, INT b)","Bitwise right shift. Shifts a b positions to the right. Returns int for tinyint, smallint and int a. Returns bigint for bigint a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","shiftrightunsigned","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "shiftrightunsigned"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T shiftrightunsigned(T a, INT b)","Bitwise unsigned right shift. Shifts a b positions to the right. Returns int for tinyint, smallint and int a. 
Returns bigint for bigint a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","greatest","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "greatest"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T greatest(T v1, T v2, ...)","Returns the greatest value of the list of values. Fixed to return NULL when one or more arguments are NULL, and strict type restriction relaxed, consistent with > operator.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","least","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "least"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T least(T v1, T v2, ...)","Returns the least value of the list of values. Fixed to return NULL when one or more arguments are NULL, and strict type restriction relaxed, consistent with < operator.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","width_bucket","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "width_bucket"; +INSERT INTO linkis_ps_udf_version 
(udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","INT width_bucket(NUMERIC expr, NUMERIC min_value, NUMERIC max_value, INT num_buckets)","Returns an integer between 0 and num_buckets+1 by mapping expr into the ith equally sized bucket. Buckets are made by dividing [min_value, max_value] into equally sized regions. If expr < min_value, return 1, if expr > max_value return num_buckets+1.",now(),"",now()); +-- 日期函数 +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","日期函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="日期函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","last_day","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "last_day"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","","Column last_day(Column e)","Given a date column, returns the last day of the month which the given date belongs to.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","dayofyear","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "dayofyear"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES 
(@ps_udf_baseinfo_id,"file://","","","","None","Column dayofyear(Column e)","Extracts the day of the year as an integer from a given date/timestamp/string.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","from_unixtime","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "from_unixtime"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string from_unixtime(bigint unixtime[, string format])","Converts the number of seconds from unix epoch (1970-01-01 00:00:00 UTC) to a string representing the timestamp of that moment in the current system time zone in the format of 1970-01-01 00:00:00.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","unix_timestamp","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "unix_timestamp"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","bigint unix_timestamp()","Gets current Unix timestamp in seconds. 
This function is not deterministic and its value is not fixed for the scope of a query execution, therefore prevents proper optimization of queries - this has been deprecated since 2.0 in favour of CURRENT_TIMESTAMP",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","unix_timestamp","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "unix_timestamp"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","bigint unix_timestamp(string date)","Converts time string in format yyyy-MM-dd HH:mm:ss to Unix timestamp (in seconds), using the default timezone and the default locale, return 0 if fail: unix_timestamp('2009-03-20 11:30:01') = 1237573801",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","unix_timestamp","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "unix_timestamp"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","bigint unix_timestamp(string date, string pattern)","Convert time string with given pattern to Unix time stamp (in seconds), return 0 if fail: unix_timestamp('2009-03-20', 'yyyy-MM-dd') = 1237532400.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","to_date","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := 
id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "to_date"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string to_date(string timestamp)","Returns the date part of a timestamp string : to_date(1970-01-01 00:00:00) = 1970-01-01. returns a date object.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","year","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "year"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int year(string date)","Returns the year part of a date or a timestamp string: year(1970-01-01 00:00:00) = 1970, year(1970-01-01) = 1970.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","quarter","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "quarter"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int quarter(date/timestamp/string)","Returns the quarter of the year for a date, timestamp, or string in the range 1 to 4 . 
Example: quarter('2015-04-08') = 2.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","month","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "month"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int month(string date)","Returns the month part of a date or a timestamp string: month(1970-11-01 00:00:00) = 11, month(1970-11-01) = 11.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","day","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "day"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int day(string date) ","Returns the day part of a date or a timestamp string: day('1970-11-01 00:00:00') = 1, day('1970-11-01') = 1",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","dayofmonth","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "dayofmonth"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int dayofmonth(date)","Returns the day part of a date or a timestamp string: 
dayofmonth('1970-11-01 00:00:00') = 1, dayofmonth('1970-11-01') = 1.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","hour","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "hour"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int hour(string date)","Returns the hour of the timestamp: hour('2009-07-30 12:58:59') = 12, hour('12:58:59') = 12.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","minute","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "minute"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int minute(string date)","Returns the minute of the timestamp.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","second","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "second"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int second(string date)","Returns the second of the timestamp.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo 
(create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","weekofyear","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "weekofyear"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int weekofyear(string date)","Returns the week number of a timestamp string: weekofyear(1970-11-01 00:00:00) = 44, weekofyear(1970-11-01) = 44.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","extract","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "extract"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int extract(field FROM source)","Retrieve fields such as days or hours from source. Source must be a date, timestamp, interval or a string that can be converted into either a date or timestamp. 
Supported fields include: day, dayofweek, hour, minute, month, quarter, second, week and year.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","datediff","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "datediff"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int datediff(string enddate, string startdate)","Returns the number of days from startdate to enddate: datediff('2009-03-01', '2009-02-27') = 2.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","date_add","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "date_add"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string date_add(date/timestamp/string startdate, tinyint/smallint/int days)","Adds a number of days to startdate: date_add('2008-12-31', 1) = '2009-01-01'.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","date_sub","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "date_sub"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES 
(@ps_udf_baseinfo_id,"file://","","","","None","string date_sub(date/timestamp/string startdate, tinyint/smallint/int days)","Subtracts a number of days to startdate: date_sub('2008-12-31', 1) = '2008-12-30'.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","from_utc_timestamp","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "from_utc_timestamp"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","timestamp from_utc_timestamp({any primitive type} ts, string timezone)","Converts a timestamp* in UTC to a given timezone.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","to_utc_timestamp","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "to_utc_timestamp"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","timestamp to_utc_timestamp({any primitive type} ts, string timezone)","Converts a timestamp* in a given timezone to UTC.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","current_date","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "current_date"; +INSERT INTO linkis_ps_udf_version 
(udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","date current_date","Returns the current date at the start of query evaluation. All calls of current_date within the same query return the same value.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","current_timestamp","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "current_timestamp"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","timestamp current_timestamp","Returns the current timestamp at the start of query evaluation. All calls of current_timestamp within the same query return the same value.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","add_months","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "add_months"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string add_months(string start_date, int num_months, output_date_format)","Returns the date that is num_months after start_date. start_date is a string, date or timestamp. 
num_months is an integer. If start_date is the last day of the month or if the resulting month has fewer days than the day component of start_date, then the r",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","last_day","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "last_day"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string last_day(string date)","Returns the last day of the month which the date belongs to. date is a string in the format 'yyyy-MM-dd HH:mm:ss' or 'yyyy-MM-dd'. The time part of date is ignored.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","next_day","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "next_day"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string next_day(string start_date, string day_of_week)","Returns the first date which is later than start_date and named as day_of_week. start_date is a string/date/timestamp. day_of_week is 2 letters, 3 letters or full name of the day of the week (e.g. Mo, tue, FRIDAY). 
The time part of start_date is ignored.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","trunc","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "trunc"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string trunc(string date, string format)","Returns date truncated to the unit specified by the format. Supported formats: MONTH/MON/MM, YEAR/YYYY/YY. Example: trunc('2015-03-17', 'MM') = 2015-03-01.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","months_between","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "months_between"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double months_between(date1, date2)","Returns number of months between dates date1 and date2. If date1 is later than date2, then the result is positive. If date1 is earlier than date2, then the result is negative. 
If date1 and date2 are either the same days of the month or both last days of ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","date_format","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "date_format"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string date_format(date/timestamp/string ts, string fmt)","Converts a date/timestamp/string to a value of string in the format specified by the date format fmt . Supported formats are Java SimpleDateFormat formats?.The second argument fmt should be constant. Example: date_format('2015-04-08', 'y') = '2015'.",now(),"",now()); +-- 聚合函数 +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","聚合函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="聚合函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sum","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sum"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","","Column sum(Column e)"," returns the sum of all values in the expression.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES 
("sys","skewness","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "skewness"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column skewness(Column e)","returns the skewness of the values in a group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","stddev","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "stddev"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column stddev(Column e)","alias for stddev_samp.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sumDistinct","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sumDistinct"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column sumDistinct(Column e)","returns the sum of distinct values in the expression.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","countDistinct","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and 
udf_name = "countDistinct"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column countDistinct(String columnName,String... columnNames)","returns the number of distinct items in a group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","first","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "first"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column first(Column e)"," returns the first value in a group.The function by default returns the first values it sees. It will return the first non-null value it sees when ignoreNulls is set to true. 
If all values are null, then null is returned.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","grouping_id","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "grouping_id"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column grouping_id(String colName,scala.collection.Seq colNames)","returns the level of grouping,",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","grouping","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "grouping"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column grouping(String columnName)","indicates whether a specified column in a GROUP BY list is aggregated or not, returns 1 for aggregated or 0 for not aggregated in the result set.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","kurtosis","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "kurtosis"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column kurtosis(Column e)","returns the 
kurtosis of the values in a group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","last","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "last"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column last(Column e,boolean ignoreNulls)","returns the last value in a group.The function by default returns the last values it sees. It will return the last non-null value it sees when ignoreNulls is set to true. If all values are null, then null is returned.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","mean","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "mean"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column mean(String columnName)","returns the average of the values in a group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","count","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "count"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","BIGINT 
count([DISTINCT] col)","count(*) - Returns the total number of retrieved rows, including rows containing NULL values. count(expr) - Returns the number of rows for which the supplied expression is non-NULL. count(DISTINCT expr[, expr]) - Returns the number of rows for which the s",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sum","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sum"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE sum(col), sum(DISTINCT col)","Returns the sum of the elements in the group or the sum of the distinct values of the column in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","avg","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "avg"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE avg(col), avg(DISTINCT col)","Returns the average of the elements in the group or the average of the distinct values of the column in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","min","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "min"; +INSERT INTO linkis_ps_udf_version 
(udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE min(col)","Returns the minimum of the column in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","max","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "max"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE max(col)","Returns the maximum value of the column in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","variance","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "variance"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE variance(col)","Returns the variance of a numeric column in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","var_pop","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "var_pop"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES 
(@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE var_pop(col)","Returns the variance of a numeric column in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","var_samp","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "var_samp"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE var_samp(col)","Returns the unbiased sample variance of a numeric column in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","stddev_pop","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "stddev_pop"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE stddev_pop(col)","Returns the standard deviation of a numeric column in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","stddev_samp","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "stddev_samp"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE stddev_samp(col)","Returns the unbiased sample 
standard deviation of a numeric column in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","covar_pop","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "covar_pop"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE covar_pop(col1, col2)","Returns the population covariance of a pair of numeric columns in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","covar_samp","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "covar_samp"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE covar_samp(col1, col2)","Returns the sample covariance of a pair of a numeric columns in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","corr","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "corr"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE corr(col1, col2)","Returns the Pearson coefficient of correlation of a pair of a numeric columns in 
the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","percentile","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "percentile"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE percentile(BIGINT col, p)","Returns the exact pth percentile of a column in the group (does not work with floating point types). p must be between 0 and 1. NOTE: A true percentile can only be computed for integer values. Use PERCENTILE_APPROX if your input is non-integral.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","percentile","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "percentile"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array percentile(BIGINT col, array(p1 [, p2]...))","Returns the exact percentiles p1, p2, ... of a column in the group (does not work with floating point types). pi must be between 0 and 1. NOTE: A true percentile can only be computed for integer values. 
Use PERCENTILE_APPROX if your input is non-integral.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","percentile_approx","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "percentile_approx"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE percentile_approx(DOUBLE col, p [, B])","Returns an approximate pth percentile of a numeric column (including floating point types) in the group. The B parameter controls approximation accuracy at the cost of memory. Higher values yield better approximations, and the default is 10,000. When the ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","percentile_approx","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "percentile_approx"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array percentile_approx(DOUBLE col, array(p1 [, p2]...) 
[, B])","Same as above, but accepts and returns an array of percentile values instead of a single one.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_avgx","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_avgx"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_avgx(independent, dependent)","Equivalent to avg(dependent).",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_avgy","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_avgy"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_avgy(independent, dependent)","Equivalent to avg(independent).",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_count","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_count"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_count(independent, dependent)","Returns the number of non-null pairs used to fit the linear regression 
line.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_intercept","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_intercept"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_intercept(independent, dependent)","Returns the y-intercept of the linear regression line, i.e. the value of b in the equation dependent = a * independent + b.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_r2","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_r2"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_r2(independent, dependent)","Returns the coefficient of determination for the regression.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_slope","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_slope"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_slope(independent, dependent)","Returns the slope of the linear 
regression line, i.e. the value of a in the equation dependent = a * independent + b.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_sxx","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_sxx"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_sxx(independent, dependent)","Equivalent to regr_count(independent, dependent) * var_pop(dependent).",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_sxy","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_sxy"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_sxy(independent, dependent)","Equivalent to regr_count(independent, dependent) * covar_pop(independent, dependent).",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_syy","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_syy"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_syy(independent, 
dependent)","Equivalent to regr_count(independent, dependent) * var_pop(independent).",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","histogram_numeric","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "histogram_numeric"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array histogram_numeric(col, b)","Computes a histogram of a numeric column in the group using b non-uniformly spaced bins. The output is an array of size b of double-valued (x,y) coordinates that represent the bin centers and heights",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","collect_set","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "collect_set"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array collect_set(col)","Returns a set of objects with duplicate elements eliminated.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","collect_list","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "collect_list"; +INSERT INTO linkis_ps_udf_version 
(udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array collect_list(col)","Returns a list of objects with duplicates. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ntile","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ntile"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","INTEGER ntile(INTEGER x)","Divides an ordered partition into x groups called buckets and assigns a bucket number to each row in the partition. This allows easy calculation of tertiles, quartiles, deciles, percentiles and other common summary statistics. 
",now(),"",now()); +-- 条件判断函数 +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","条件判断函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="条件判断函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","not","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "not"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column not(Column e)","Inversion of boolean expression,",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","when","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "when"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column when(Column condition,Object value)","Evaluates a list of conditions and returns one of multiple possible result expressions. 
If otherwise is not defined at the end, null is returned for unmatched conditions.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","isnan","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "isnan"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column isnan(Column e)","Return true iff the column is NaN.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","nanvl","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "nanvl"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column nanvl(Column col1, Column col2)","Returns col1 if it is not NaN, or col2 if col1 is NaN.Both inputs should be floating point columns (DoubleType or FloatType).",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","point","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "point"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","","columns (DoubleType or FloatType)","",now(),"",now()); +INSERT INTO 
linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","if","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "if"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T if(boolean testCondition, T valueTrue, T valueFalseOrNull)","Returns valueTrue when testCondition is true, returns valueFalseOrNull otherwise.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","isnull","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "isnull"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","boolean isnull( a )","Returns true if a is NULL and false otherwise.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","isnotnull ","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "isnotnull "; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","boolean isnotnull ( a )","Returns true if a is not NULL and false otherwise.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo 
(create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","nvl","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "nvl"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T nvl(T value, T default_value)","Returns default value if value is null else returns value.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","coalesce","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "coalesce"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T coalesce(T v1, T v2, ...)","Returns the first v that is not NULL, or NULL if all v's are NULL.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","nullif","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "nullif"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T nullif( a, b )","Returns NULL if a=b; otherwise returns a. Shorthand for: CASE WHEN a = b then NULL else a",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES 
("sys","assert_true","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "assert_true"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","void assert_true(boolean condition)","Throw an exception if 'condition' is not true, otherwise return null . For example, select assert_true (2<1).",now(),"",now()); +-- 类型转换函数 +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","类型转换函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="类型转换函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","binary","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "binary"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","binary binary(string|binary)","Casts the parameter into a binary.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","cast","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "cast"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Expected = to follow type 
cast(expr as )","Converts the results of the expression expr to . For example, cast('1' as BIGINT) will convert the string '1' to its integral representation. A null is returned if the conversion does not succeed. If cast(expr as boolean) Hive returns true for a non",now(),"",now()); +-- 集合操作函数 +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","集合操作函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="集合操作函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","struct","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "struct"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column struct(scala.collection.Seq cols)","Creates a new struct column. If the input column is a column in a DataFrame, or a derived column expression that is named (i.e. 
aliased), its name would be remained as the StructField's name, otherwise, the newly generated StructField's name would be auto",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","col","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "col"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column col(String colName)","Returns a Column based on the given column name.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","column","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "column"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column column(String colName)","Returns a Column based on the given column name. 
Alias of col.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","desc_nulls_first","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "desc_nulls_first"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column desc_nulls_first(String columnName)","Returns a sort expression based on the descending order of the column, and null values appear before non-null values.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","desc_nulls_last","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "desc_nulls_last"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column desc_nulls_last(String columnName)","Returns a sort expression based on the descending order of the column, and null values appear after non-null values.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","desc","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "desc"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column 
desc(String columnName)","Returns a sort expression based on the descending order of the column.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","array","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "array"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array(val1, val2, ...)","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","map","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "map"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","map(key1, value1, ...)","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","size","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "size"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int size(Map|Array a)","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","map_keys","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select 
@ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "map_keys"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array map_keys(Map)","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","map_values","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "map_values"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array map_values(Map)","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","array_contains","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "array_contains"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","boolean array_contains(Array, value)","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sort_array","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sort_array"; +INSERT INTO linkis_ps_udf_version 
(udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array sort_array(Array)","",now(),"",now()); +-- 数据加密函数 +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","数据加密函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="数据加密函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","base64","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "base64"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column base64(Column e)","Computes the BASE64 encoding of a binary column and returns it as a string column. 
This is the reverse of unbase64.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","mask","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "mask"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string mask(string str[, string upper[, string lower[, string number]]])","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","mask_first_n","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "mask_first_n"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string mask_first_n(string str[, int n])","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","mask_last_n","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "mask_last_n"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string mask_last_n(string str[, int n])","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES 
("sys","mask_show_first_n","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "mask_show_first_n"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string mask_show_first_n(string str[, int n])","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","mask_show_last_n","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "mask_show_last_n"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string mask_show_last_n(string str[, int n])","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","mask_hash","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "mask_hash"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string mask_hash(string|char|varchar str)","",now(),"",now()); +-- 生成表函数 +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","生成表函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="生成表函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo 
(create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","explode","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "explode"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T explode(Array|Array|Map a)","Explodes an array or map to multiple rows. Returns a row-set with a single column (col), one row for each element from the array or a row-set with a two columns (key,value), one row for each key-value pair from the input map",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","posexplode","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "posexplode"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int,T posexplode(ARRAY a)","Explodes an array to multiple rows with additional positional column of int type (position of items in the original array, starting with 0). 
Returns a row-set with two columns (pos,val), one row for each element from the array.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","inline","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "inline"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T1,...,Tn inline(ARRAY> a)","Explodes an array of structs to multiple rows. Returns a row-set with N columns (N = number of top level elements in the struct), one row per struct from the array. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","stack","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "stack"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T1,...,Tn/r stack(int r,T1 V1,...,Tn/r Vn)","Breaks up n values V1,...,Vn into r rows. 
Each row will have n/r columns. r must be constant.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","json_tuple","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "json_tuple"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string1,...,stringn json_tuple(string jsonStr,string k1,...,string kn)","Takes JSON string and a set of n keys, and returns a tuple of n values. This is a more efficient version of the get_json_object UDF because it can get multiple keys with just one call.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","parse_url_tuple","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "parse_url_tuple"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string 1,...,stringn parse_url_tuple(string urlStr,string p1,...,string pn)","Takes URL string and a set of n URL parts, and returns a tuple of n values. This is similar to the parse_url() UDF but can extract multiple parts at once out of a URL. 
Valid part names are: HOST, PATH, QUERY, REF, PROTOCOL, AUTHORITY, FILE, USERINFO, QUER",now(),"",now()); +-- 分析窗口函数 +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","分析窗口函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="分析窗口函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","dense_rank","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "dense_rank"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," Column dense_rank()","returns the rank of rows within a window partition, without any gaps.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","dense_rank ","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "dense_rank "; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","dense_rank ( ) OVER ( [query_partition_clause] order_by_clause )","Returns an ascending sequence of integers, starting with 1. 
The output sequence produces duplicate integers for duplicate values of the ORDER BY expressions.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","first_value","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "first_value"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," first_value(expr) OVER([partition_by_clause] order_by_clause [window_clause])","Returns the expression value from the first row in the window. The return value is NULL if the input expression is NULL.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","lag","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "lag"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," lag(expr [, offset] [, default]) OVER ([partition_by_clause] order_by_clause)","This function returns the value of an expression using column values from a preceding row. You specify an integer offset, which designates a row position some number of rows previous to the current row. 
Any column references in the expression argument ref",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","last_value","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "last_value"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," last_value(expr) OVER([partition_by_clause] order_by_clause [window_clause])","Returns the expression value from the last row in the window. The return value is NULL if the input expression is NULL.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","lead","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "lead"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," lead(expr [, offset] [, default]) OVER([partition_by_clause] order_by_clause)","This function returns the value of an expression using column values from a following row. You specify an integer offset, which designates a row position some number of rows after to the current row. 
Any column references in the expression argument refer ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ntile","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ntile"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," ntile(int n) OVER([partition_by_clause] order_by_clause)","用于将分组数据按照顺序切分成n片,返回当前切片值",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","percent_rank","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "percent_rank"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," percent_rank() OVER([partition_by_clause] order_by_clause)","分组内当前行的RANK值-1/分组内总行数-1",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","cume_dist","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "cume_dist"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," cume_dist() OVER([partition_by_clause] order_by_clause)","小于等于当前值的行数/分组内总行数",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo 
(create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","rank","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "rank"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," rank() OVER([partition_by_clause] order_by_clause)","Returns an ascending sequence of integers, starting with 1. The output sequence produces duplicate integers for duplicate values of the ORDER BY expressions. After generating duplicate output values for the tied input values, the function increments the s",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","row_number","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "row_number"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," row_number() OVER([partition_by_clause] order_by_clause)","Returns an ascending sequence of integers, starting with 1. Starts the sequence over for each group produced by the PARTITIONED BY clause. The output sequence includes different values for duplicate input values. 
Therefore, the sequence never contains any",now(),"",now()); +-- 其它函数 +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","其它函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="其它函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","spark_partition_id","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "spark_partition_id"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column spark_partition_id()","returns partition ID.This is indeterministic because it depends on data partitioning and task scheduling.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","to_json","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "to_json"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column to_json(Column e,scala.collection.immutable.Map options)","(Scala-specific) Converts a column containing a StructType into a JSON string with the specified schema. 
Throws an exception, in the case of an unsupported type.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","window","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "window"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column window(Column timeColumn, String windowDuration, String slideDuration)","Bucketize rows into one or more time windows given a timestamp specifying column. Window starts are inclusive but the window ends are exclusive, e.g. 12:05 will be in the window [12:05,12:10) but not in [12:00,12:05). Windows can support microsecond preci",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","approxCountDistinct","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "approxCountDistinct"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column approxCountDistinct(Column e)"," Use approx_count_distinct. 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","asc_nulls_first","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "asc_nulls_first"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column asc_nulls_first(String columnName)","Returns a sort expression based on ascending order of the column, and null values return before non-null values.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","asc_nulls_last","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "asc_nulls_last"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column asc_nulls_last(String columnName)","Returns a sort expression based on ascending order of the column, and null values appear after non-null values.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","asc","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "asc"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column asc(String columnName)","Returns a 
sort expression based on ascending order of the column.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","bitwiseNOT","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "bitwiseNOT"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column expr(String expr)","Parses the expression string into the column that it represents, similar to DataFrame.selectExpr",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","broadcast","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "broadcast"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," Dataset broadcast(Dataset df)","Marks a DataFrame as small enough for use in broadcast joins.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","callUDF","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "callUDF"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column callUDF(String udfName, scala.collection.Seq cols)","Call an user-defined 
function.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","from_json","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "from_json"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column from_json(Column e,StructType schema,scala.collection.immutable.Map options)","(Scala-specific) Parses a column containing a JSON string into a StructType with the specified schema. Returns null, in the case of an unparseable string.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","lit","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "lit"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," Column lit(Object literal)","Creates a Column of literal value.The passed in object is returned directly if it is already a Column. If the object is a Scala Symbol, it is converted into a Column also. 
Otherwise, a new Column is created to represent the literal value.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","md5","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "md5"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string md5(string/binary)","Calculates an MD5 128-bit checksum for the string or binary . The value is returned as a string of 32 hex digits, or NULL if the argument was NULL. Example: md5('ABC') = '902fbdd2b1df0c4f70b4a5d23525e932'.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sha1","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sha1"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string sha1(string/binary)","Calculates the SHA-1 digest for string or binary and returns the value as a hex string . 
Example: sha1('ABC') = '3c01bdbb26f358bab27f267924aa2c9a03fcfdb8'.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sha","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sha"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string sha(string/binary)","Calculates the SHA-1 digest for string or binary and returns the value as a hex string . Example: sha1('ABC') = '3c01bdbb26f358bab27f267924aa2c9a03fcfdb8'.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sha2","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sha2"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string sha2(string/binary, int)","Calculates the SHA-2 family of hash functions (SHA-224, SHA-256, SHA-384, and SHA-512) . The first argument is the string or binary to be hashed. 
The second argument indicates the desired bit length of the result, which must have a value of 224, 256, 384,",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","reflect","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "reflect"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","varies reflect(class, method[, arg1[, arg2..]])","Calls a Java method by matching the argument signature, using reflection. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","crc32","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "crc32"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","bigint crc32(string/binary)","Computes a cyclic redundancy check value for string or binary argument and returns bigint value . 
Example: crc32('ABC') = 2743272264.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","aes_decrypt","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "aes_decrypt"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","binary aes_decrypt(input binary, key string/binary)","Decrypt input using AES . Key lengths of 128, 192 or 256 bits can be used. 192 and 256 bits keys can be used if Java Cryptography Extension (JCE) Unlimited Strength Jurisdiction Policy Files are installed. If either argument is NULL or the key length is n",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","aes_encrypt","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "aes_encrypt"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","binary aes_encrypt(input string/binary, key string/binary)","Encrypt input using AES . Key lengths of 128, 192 or 256 bits can be used. 192 and 256 bits keys can be used if Java Cryptography Extension (JCE) Unlimited Strength Jurisdiction Policy Files are installed. 
If either argument is NULL or the key length is n",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","hash","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "hash"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int hash(a1[, a2...])","Returns a hash value of the arguments.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","current_database","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "current_database"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string current_database()","Returns current database name .",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","current_user","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "current_user"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string current_user()","Returns current user name from the configured authenticator manager?. 
Could be the same as the user provided when connecting, but with some authentication managers (for example HadoopDefaultAuthenticator) it could be different.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","logged_in_user","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "logged_in_user"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string logged_in_user()","Returns current user name from the session state. This is the username provided when connecting to Hive.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","version","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "version"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string version()","Returns the Hive version. The string contains 2 fields, the first being a build number and the second being a build hash. Example: select version(); might return 2.1.0.2.5.0.0-1245 r027527b9c5ce1a3d7d0b6d2e6de2378fb0c39232. 
Actual results will depend on y",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","java_method","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "java_method"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","varies java_method(class, method[, arg1[, arg2..]])","Synonym for?reflect. ",now(),"",now()); + + + + + + + diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/JobDescription.java b/linkis-dist/package/db/upgrade/1.4.0_schema/mysql/linkis_ddl.sql similarity index 73% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/JobDescription.java rename to linkis-dist/package/db/upgrade/1.4.0_schema/mysql/linkis_ddl.sql index f6fa3e3217..0f1cda8793 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/job/JobDescription.java +++ b/linkis-dist/package/db/upgrade/1.4.0_schema/mysql/linkis_ddl.sql @@ -6,7 +6,7 @@ * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,7 +15,5 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.job; - -/** Should store all input argument and configurations */ -public interface JobDescription {} +ALTER TABLE `linkis_cg_manager_service_instance` ADD COLUMN `identifier` varchar(32) COLLATE utf8_bin DEFAULT NULL; +ALTER TABLE `linkis_cg_manager_service_instance` ADD COLUMN `ticketId` varchar(255) COLLATE utf8_bin DEFAULT NULL; \ No newline at end of file diff --git a/linkis-dist/package/db/upgrade/1.4.0_schema/mysql/linkis_dml.sql b/linkis-dist/package/db/upgrade/1.4.0_schema/mysql/linkis_dml.sql new file mode 100644 index 0000000000..a226d806db --- /dev/null +++ b/linkis-dist/package/db/upgrade/1.4.0_schema/mysql/linkis_dml.sql @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +update linkis_ps_error_code set error_code = "01104" where error_regex ='ECM Insufficient number of instances'; +update linkis_ps_error_code set error_code = "01105" where error_regex ='Cannot allocate memory'; + + + + + + + diff --git a/linkis-dist/package/db/upgrade/1.4.1_schema/mysql/linkis_ddl.sql b/linkis-dist/package/db/upgrade/1.4.1_schema/mysql/linkis_ddl.sql new file mode 100644 index 0000000000..4cf27a3d4a --- /dev/null +++ b/linkis-dist/package/db/upgrade/1.4.1_schema/mysql/linkis_ddl.sql @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + + +ALTER TABLE `linkis_ps_udf_user_load` ADD CONSTRAINT `uniq_uid_uname` UNIQUE (`udf_id`, `user_name`); +ALTER TABLE `linkis_ps_bml_resources` ADD CONSTRAINT `uniq_rid_eflag` UNIQUE (`resource_id`, `enable_flag`); + + +ALTER TABLE `linkis_ps_configuration_config_key` ADD UNIQUE `uniq_key_ectype` (`key`,`engine_conn_type`); + +ALTER TABLE `linkis_ps_configuration_config_key` modify column `engine_conn_type` varchar(50) DEFAULT '' COMMENT 'engine type,such as spark,hive etc'; + + diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/result/ExecutionStatus.java b/linkis-dist/package/db/upgrade/1.4.1_schema/mysql/linkis_dml.sql similarity index 83% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/result/ExecutionStatus.java rename to linkis-dist/package/db/upgrade/1.4.1_schema/mysql/linkis_dml.sql index 744ce3433a..80f6059975 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/result/ExecutionStatus.java +++ b/linkis-dist/package/db/upgrade/1.4.1_schema/mysql/linkis_dml.sql @@ -6,7 +6,7 @@ * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, @@ -15,6 +15,6 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.result; +update linkis_ps_configuration_config_key set engine_conn_type = "" where engine_conn_type is NULL; + -public interface ExecutionStatus {} diff --git a/linkis-dist/package/sbin/ext/linkis-cg-linkismanager b/linkis-dist/package/sbin/ext/linkis-cg-linkismanager index 7a3849534a..9a3cd3317d 100644 --- a/linkis-dist/package/sbin/ext/linkis-cg-linkismanager +++ b/linkis-dist/package/sbin/ext/linkis-cg-linkismanager @@ -22,7 +22,7 @@ export SERVER_SUFFIX="linkis-computation-governance/linkis-cg-linkismanager" export SERVER_HEAP_SIZE="1G" -export SERVER_CLASS=org.apache.linkis.manager.am.LinkisManagerApplication +export SERVER_CLASS=org.apache.linkis.manager.LinkisManagerApplication if test -z "$MANAGER_HEAP_SIZE" then diff --git a/linkis-dist/package/sbin/kill-ec-process-by-port.sh b/linkis-dist/package/sbin/kill-ec-process-by-port.sh new file mode 100644 index 0000000000..8be2f5904a --- /dev/null +++ b/linkis-dist/package/sbin/kill-ec-process-by-port.sh @@ -0,0 +1,28 @@ +#!/usr/bin/env bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +port=$1 +shellDir=`dirname $0` +workDir=`cd ${shellDir}/..;pwd` +if [ "$LINKIS_HOME" = "" ] +then + LINKIS_HOME=$workDir +fi +pid=`ps -ef | grep server.port=$port | grep EngineConnServer | awk '{print $2}'` +echo "`date '+%Y-%m-%d %H:%M:%S'` Get port $port pid is $pid" +if [ "$pid" != "" ] +then + sh $LINKIS_HOME/sbin/kill-process-by-pid.sh $pid +fi \ No newline at end of file diff --git a/linkis-engineconn-plugins/elasticsearch/src/main/resources/log4j2.xml b/linkis-engineconn-plugins/elasticsearch/src/main/resources/log4j2.xml index 020b94567e..0aecbbe740 100644 --- a/linkis-engineconn-plugins/elasticsearch/src/main/resources/log4j2.xml +++ b/linkis-engineconn-plugins/elasticsearch/src/main/resources/log4j2.xml @@ -20,23 +20,23 @@ - + - + - + - + diff --git a/linkis-engineconn-plugins/flink/src/main/resources/log4j2.xml b/linkis-engineconn-plugins/flink/src/main/resources/log4j2.xml index 4e2c961ae3..c3741cef34 100644 --- a/linkis-engineconn-plugins/flink/src/main/resources/log4j2.xml +++ b/linkis-engineconn-plugins/flink/src/main/resources/log4j2.xml @@ -20,7 +20,7 @@ - + @@ -31,12 +31,12 @@ - + - + diff --git a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/config/FlinkEnvConfiguration.scala b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/config/FlinkEnvConfiguration.scala index 466dab95ed..857faeed8d 100644 --- a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/config/FlinkEnvConfiguration.scala +++ b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/config/FlinkEnvConfiguration.scala @@ -99,6 +99,9 @@ object FlinkEnvConfiguration { val FLINK_ONCE_APP_STATUS_FETCH_INTERVAL = CommonVars("flink.app.fetch.status.interval", new TimeType("5s")) + val FLINK_ONCE_JAR_APP_REPORT_APPLICATIONID_INTERVAL = + CommonVars("flink.app.report.appid.interval", new TimeType("60s")) + val 
FLINK_ONCE_APP_STATUS_FETCH_FAILED_MAX = CommonVars("flink.app.fetch.status.failed.num", 3) val FLINK_REPORTER_ENABLE = CommonVars("linkis.flink.reporter.enable", false) @@ -118,6 +121,20 @@ object FlinkEnvConfiguration { val FLINK_KERBEROS_CONF_PATH = CommonVars("linkis.flink.kerberos.krb5-conf.path", "") val FLINK_PARAMS_BLANK_PLACEHOLER = - CommonVars("linkis.flink.params.placeholder.blank", "\\0x001") + CommonVars("linkis.flink.params.placeholder.blank", "\u0001") + + val FLINK_MANAGER_MODE_CONFIG_KEY = CommonVars("linkis.flink.manager.mode.on", false) + + val FLINK_MANAGER_LOAD_TASK_MAX = CommonVars("linkis.flink.manager.load.task.max", 50) + + val HADOOP_CONF_DIR = CommonVars("linkis.flink.hadoop.conf.dir", System.getenv("HADOOP_CONF_DIR")) + + val FLINK_MANAGER_CLIENT_MAX_NUM = CommonVars("linkis.flink.client.num.max", 200) + + val FLINK_MANAGER_CLIENT_EXPIRE_MILLS = + CommonVars("linkis.flink.client.expire.mills", 3600 * 1000) + + val FLINK_HANDSHAKE_WAIT_TIME_MILLS = + CommonVars("linkis.flink.handshake.wait.time.mills", 60 * 1000) } diff --git a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/config/FlinkSrpingConfiguration.scala b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/config/FlinkSrpingConfiguration.scala new file mode 100644 index 0000000000..e0519e5483 --- /dev/null +++ b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/config/FlinkSrpingConfiguration.scala @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.flink.config + +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.acessible.executor.service.{ + EngineConnConcurrentLockService, + EngineConnTimedLockService, + LockService +} +import org.apache.linkis.engineconn.executor.listener.ExecutorListenerBusContext +import org.apache.linkis.engineconnplugin.flink.util.ManagerUtil + +import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean +import org.springframework.context.annotation.{Bean, Configuration} + +@Configuration +class FlinkSrpingConfiguration extends Logging { + + private val asyncListenerBusContext = + ExecutorListenerBusContext.getExecutorListenerBusContext().getEngineConnAsyncListenerBus + + @Bean(Array("lockService")) + @ConditionalOnMissingBean + def createLockManager(): LockService = { + + val lockService = + if (ManagerUtil.isManager) { + logger.info("Engine is manager, supports parallelism.") + new EngineConnConcurrentLockService + } else { + logger.info("Engine is not manager, doesn't support parallelism.") + new EngineConnTimedLockService + } + asyncListenerBusContext.addListener(lockService) + FlinkLockerServiceHolder.registerLockService(lockService) + lockService + } + +} + +object FlinkLockerServiceHolder extends Logging { + + private var lockService: LockService = _ + + def registerLockService(service: LockService): Unit = { + Utils.tryAndError { + if (null != service) { + if (null == lockService) { + logger.info(s"Will register lockService : 
${service.getClass.getName}") + lockService = service + } else { + logger.warn( + s"Default lockService has been registered to ${lockService.getClass.getName}, will not register : ${service.getClass.getName}" + ) + } + } else { + logger.warn("Cannot register null lockService") + } + } + } + + def getDefaultLockService(): LockService = lockService + +} diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/YarnCallbackService.scala b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/constants/FlinkECConstant.scala similarity index 65% rename from linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/YarnCallbackService.scala rename to linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/constants/FlinkECConstant.scala index 06a9c787c5..6b3a0d3562 100644 --- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/org/apache/linkis/ecm/server/service/YarnCallbackService.scala +++ b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/constants/FlinkECConstant.scala @@ -15,14 +15,23 @@ * limitations under the License. 
*/ -package org.apache.linkis.ecm.server.service +package org.apache.linkis.engineconnplugin.flink.constants -import org.apache.linkis.protocol.callback.{YarnAPPIdCallbackProtocol, YarnInfoCallbackProtocol} +object FlinkECConstant { -trait YarnCallbackService { + val FLINK_MANAGER_OPERATION_TYPE_KEY = "operationType" - def dealApplicationId(protocol: YarnAPPIdCallbackProtocol): Unit + val FLINK_OPERATION_BOUNDARY_KEY = "operationBoundary" - def dealApplicationURI(protocol: YarnInfoCallbackProtocol): Unit + val EC_INSTANCE_KEY = "ecInstance" + val MSG_KEY = "msg" + + val SNAPHOT_KEY = "snapshot" + + val SAVAPOINT_PATH_KEY = "savepointPath" + + val SAVEPOINT_MODE_KEY = "mode" + + val RESULT_SAVEPOINT_KEY = "writtenSavepoint" } diff --git a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/context/EnvironmentContext.scala b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/context/EnvironmentContext.scala index e5b9521289..fb28d6843b 100644 --- a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/context/EnvironmentContext.scala +++ b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/context/EnvironmentContext.scala @@ -51,6 +51,8 @@ class EnvironmentContext( private var deploymentTarget: YarnDeploymentTarget = YarnDeploymentTarget.PER_JOB + private var extraParams: util.Map[String, Any] = _ + def this( defaultEnv: Environment, systemConfiguration: Configuration, @@ -61,7 +63,8 @@ class EnvironmentContext( flinkLibRemotePath: String, providedLibDirsArray: Array[String], shipDirsArray: Array[String], - dependencies: util.List[URL] + dependencies: util.List[URL], + extraParams: util.Map[String, Any] ) { this( defaultEnv, @@ -86,6 +89,8 @@ class EnvironmentContext( this.flinkConfig.set(LinkisYarnClusterClientFactory.YARN_CONFIG_DIR, this.yarnConfDir) // set flink dist-jar(设置 flink dist jar) 
this.flinkConfig.set(YarnConfigOptions.FLINK_DIST_JAR, distJarPath) + // other params + this.extraParams = extraParams } def setDeploymentTarget(deploymentTarget: YarnDeploymentTarget): Unit = this.deploymentTarget = @@ -111,6 +116,13 @@ class EnvironmentContext( def getDependencies: util.List[URL] = dependencies + def setExtraParams(params: util.Map[String, Any]): EnvironmentContext = { + this.extraParams = params + this + } + + def getExtraParams(): util.Map[String, Any] = extraParams + override def equals(o: Any): Boolean = o match { case context: EnvironmentContext => if (this eq context) return true diff --git a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/executor/FlinkJarOnceExecutor.scala b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/executor/FlinkJarOnceExecutor.scala index ea72cd9b69..28ea52656d 100644 --- a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/executor/FlinkJarOnceExecutor.scala +++ b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/executor/FlinkJarOnceExecutor.scala @@ -18,13 +18,24 @@ package org.apache.linkis.engineconnplugin.flink.executor import org.apache.linkis.common.utils.Utils +import org.apache.linkis.engineconn.acessible.executor.service.ExecutorHeartbeatServiceHolder +import org.apache.linkis.engineconn.executor.service.ManagerService import org.apache.linkis.engineconn.once.executor.OnceExecutorExecutionContext import org.apache.linkis.engineconnplugin.flink.client.deployment.YarnApplicationClusterDescriptorAdapter +import org.apache.linkis.engineconnplugin.flink.config.FlinkEnvConfiguration import org.apache.linkis.engineconnplugin.flink.config.FlinkEnvConfiguration._ import org.apache.linkis.engineconnplugin.flink.context.FlinkEngineConnContext +import org.apache.linkis.engineconnplugin.flink.operator.StatusOperator +import 
org.apache.linkis.engineconnplugin.flink.util.YarnUtil +import org.apache.linkis.governance.common.conf.GovernanceCommonConf +import org.apache.linkis.governance.common.constant.ec.ECConstants +import org.apache.linkis.manager.common.entity.enumeration.NodeStatus import org.apache.commons.lang3.StringUtils +import java.util +import java.util.concurrent.{Future, TimeUnit} + import scala.concurrent.duration.Duration class FlinkJarOnceExecutor( @@ -32,6 +43,10 @@ class FlinkJarOnceExecutor( override protected val flinkEngineConnContext: FlinkEngineConnContext ) extends FlinkOnceExecutor[YarnApplicationClusterDescriptorAdapter] { + private var daemonThread: Future[_] = _ + + private var firstReportAppIdTimestampMills: Long = 0L + override def doSubmit( onceExecutorExecutionContext: OnceExecutorExecutionContext, options: Map[String, String] @@ -48,6 +63,73 @@ class FlinkJarOnceExecutor( Utils.waitUntil(() => clusterDescriptor.initJobId(), Duration.Inf) setJobID(clusterDescriptor.getJobId.toHexString) super.waitToRunning() + if (YarnUtil.isDetach(flinkEngineConnContext.getEnvironmentContext.getExtraParams())) { + waitToExit() + } + } + + override def close(): Unit = { + super.close() + if (null != daemonThread) { + daemonThread.cancel(true) + } + } + + override protected def closeYarnApp(): Unit = { + if (YarnUtil.isDetach(flinkEngineConnContext.getEnvironmentContext.getExtraParams())) { + if (getStatus == NodeStatus.Failed) { + logger.info("Will kill yarn app on close with clientType : detach, because status failed.") + super.closeYarnApp() + } else { + logger.info("Skip to kill yarn app on close with clientType : detach.") + } + } else { + logger.info("Will kill yarn app on close with clientType : attach.") + super.closeYarnApp() + } + } + + private def waitToExit(): Unit = { + // upload applicationId to manager and then exit + val thisExecutor = this + if (!isCompleted) { + daemonThread = Utils.defaultScheduler.scheduleWithFixedDelay( + new Runnable { + override def 
run(): Unit = { + if (!isCompleted) { + Utils.waitUntil(() => StringUtils.isNotBlank(getApplicationId), Duration.apply("10s")) + if (StringUtils.isNotBlank(getApplicationId)) { + Utils.tryAndWarn { + val heartbeatService = ExecutorHeartbeatServiceHolder.getDefaultHeartbeatService() + if (null == heartbeatService) { + logger.error("HeartbeatService not inited.") + return null + } + val heartbeatMsg = heartbeatService.generateHeartBeatMsg(thisExecutor) + ManagerService.getManagerService.heartbeatReport(heartbeatMsg) + logger.info( + s"Succeed to report heatbeatMsg : ${heartbeatMsg.getHeartBeatMsg}, will add handshake." + ) + if (0L >= firstReportAppIdTimestampMills) { + firstReportAppIdTimestampMills = System.currentTimeMillis() + } + if (!StatusOperator.isHandshaked) { + StatusOperator.addHandshake() + } else { + logger.info("Will exit with handshaked.") + trySucceed() + } + } + } + } + } + }, + 1000, + FlinkEnvConfiguration.FLINK_ONCE_JAR_APP_REPORT_APPLICATIONID_INTERVAL.getValue.toLong, + TimeUnit.MILLISECONDS + ) + logger.info("waitToExit submited.") + } } } diff --git a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/executor/FlinkManagerConcurrentExecutor.scala b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/executor/FlinkManagerConcurrentExecutor.scala new file mode 100644 index 0000000000..b204c2405a --- /dev/null +++ b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/executor/FlinkManagerConcurrentExecutor.scala @@ -0,0 +1,112 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.flink.executor + +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.engineconn.acessible.executor.conf.AccessibleExecutorConfiguration +import org.apache.linkis.engineconn.acessible.executor.service.EngineConnConcurrentLockService +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconn.computation.executor.execute.{ + ComputationExecutor, + ConcurrentComputationExecutor, + EngineExecutionContext +} +import org.apache.linkis.engineconn.once.executor.OnceExecutorExecutionContext +import org.apache.linkis.engineconnplugin.flink.client.deployment.ClusterDescriptorAdapter +import org.apache.linkis.engineconnplugin.flink.config.FlinkLockerServiceHolder +import org.apache.linkis.engineconnplugin.flink.context.FlinkEngineConnContext +import org.apache.linkis.engineconnplugin.flink.errorcode.FlinkErrorCodeSummary +import org.apache.linkis.engineconnplugin.flink.exception.JobExecutionException +import org.apache.linkis.engineconnplugin.flink.util.ManagerUtil +import org.apache.linkis.manager.common.entity.enumeration.NodeStatus +import org.apache.linkis.protocol.engine.JobProgressInfo +import org.apache.linkis.scheduler.executer.{ + AsynReturnExecuteResponse, + ErrorExecuteResponse, + ExecuteResponse +} + +class FlinkManagerConcurrentExecutor( + val id: Long, + maxRunningNumber: Int, + val flinkEngineConnContext: FlinkEngineConnContext +) extends FlinkOnceExecutor[ClusterDescriptorAdapter] + with FlinkExecutor + with Logging { + + 
override protected def submit( + onceExecutorExecutionContext: OnceExecutorExecutionContext + ): Unit = { + logger.info("Succeed to init FlinkManagerExecutor.") + } + + override def execute( + onceExecutorExecutionContext: OnceExecutorExecutionContext + ): ExecuteResponse = { + val isManager = ManagerUtil.isManager + val lockService = FlinkLockerServiceHolder.getDefaultLockService() + if ( + isManager && null != lockService && lockService + .isInstanceOf[EngineConnConcurrentLockService] + ) { + val msg = "Succeed to init FlinkManagerExecutor." + logger.info(msg) + new AsynReturnExecuteResponse { + override def notify(rs: ExecuteResponse => Unit): Unit = { + logger.info(s"FlinkManagerExecutor will skip listener : ${rs}") + } + } + } else { + ErrorExecuteResponse( + "FlinkManagerExecutor got default lockService is not instance of EngineConnConcurrentLockService, will shutdown.", + null + ) + } + } + + override def getId: String = id.toString + + override def close(): Unit = { + logger.info(s"FlinkManagerExecutor : ${getId} will close.") + super.close() + } + + def getMaxRunningNumber: Int = maxRunningNumber + + def getFlinkContext(): FlinkEngineConnContext = flinkEngineConnContext + + override def doSubmit( + onceExecutorExecutionContext: OnceExecutorExecutionContext, + options: Map[String, String] + ): Unit = submit(onceExecutorExecutionContext) + + override protected def initOnceExecutorExecutionContext( + onceExecutorExecutionContext: OnceExecutorExecutionContext + ): Unit = {} + + override protected def createOnceExecutorExecutionContext( + engineCreationContext: EngineCreationContext + ): OnceExecutorExecutionContext = new OnceExecutorExecutionContext(null, null) + + override def tryReady(): Boolean = { + // set default status to Unlock + transition(NodeStatus.Unlock) + true + } + +} diff --git a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/executor/FlinkOnceExecutor.scala 
b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/executor/FlinkOnceExecutor.scala index 411f574e7f..c18b86192d 100644 --- a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/executor/FlinkOnceExecutor.scala +++ b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/executor/FlinkOnceExecutor.scala @@ -32,6 +32,7 @@ import org.apache.linkis.engineconnplugin.flink.config.FlinkEnvConfiguration.{ } import org.apache.linkis.engineconnplugin.flink.errorcode.FlinkErrorCodeSummary._ import org.apache.linkis.engineconnplugin.flink.exception.ExecutorInitException +import org.apache.linkis.engineconnplugin.flink.executor.interceptor.FlinkJobSubmitInterceptor import org.apache.linkis.manager.common.entity.enumeration.NodeStatus import org.apache.flink.api.common.JobStatus @@ -47,6 +48,8 @@ trait FlinkOnceExecutor[T <: ClusterDescriptorAdapter] protected var clusterDescriptor: T = _ private var daemonThread: Future[_] = _ + private var interceptor: FlinkJobSubmitInterceptor = _ + protected def submit(onceExecutorExecutionContext: OnceExecutorExecutionContext): Unit = { ClusterDescriptorAdapterFactory.create(flinkEngineConnContext.getExecutionContext) match { case adapter: T => clusterDescriptor = adapter @@ -58,7 +61,14 @@ trait FlinkOnceExecutor[T <: ClusterDescriptorAdapter] case (k, v) if v != null => k -> v.toString case (k, _) => k -> null }.toMap - doSubmit(onceExecutorExecutionContext, options) + Option(interceptor).foreach(op => op.beforeSubmit(onceExecutorExecutionContext)) + Utils.tryCatch { + doSubmit(onceExecutorExecutionContext, options) + Option(interceptor).foreach(op => op.afterSubmitSuccess(onceExecutorExecutionContext)) + } { t: Throwable => + Option(interceptor).foreach(op => op.afterSubmitFail(onceExecutorExecutionContext, t)) + throw t + } if (isCompleted) return if (null == clusterDescriptor.getClusterID) { throw new 
ExecutorInitException(YARN_IS_NULL.getErrorDesc) @@ -82,15 +92,17 @@ trait FlinkOnceExecutor[T <: ClusterDescriptorAdapter] def getClusterDescriptorAdapter: T = clusterDescriptor + def setSubmitInterceptor(interceptor: FlinkJobSubmitInterceptor): Unit = { + this.interceptor = interceptor + } + override def getId: String = "FlinkOnceApp_" + id protected def closeDaemon(): Unit = { if (daemonThread != null) daemonThread.cancel(true) } - override def close(): Unit = { - super.close() - closeDaemon() + protected def closeYarnApp(): Unit = { if (clusterDescriptor != null) { clusterDescriptor.cancelJob() clusterDescriptor.close() @@ -98,6 +110,12 @@ trait FlinkOnceExecutor[T <: ClusterDescriptorAdapter] flinkEngineConnContext.getExecutionContext.getClusterClientFactory.close() } + override def close(): Unit = { + super.close() + closeDaemon() + closeYarnApp() + } + override protected def waitToRunning(): Unit = { if (!isCompleted) { daemonThread = Utils.defaultScheduler.scheduleAtFixedRate( diff --git a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/executor/interceptor/FlinkJobResCleanInterceptor.scala b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/executor/interceptor/FlinkJobResCleanInterceptor.scala new file mode 100644 index 0000000000..b33f005ca9 --- /dev/null +++ b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/executor/interceptor/FlinkJobResCleanInterceptor.scala @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.flink.executor.interceptor + +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.engineconn.once.executor.OnceExecutorExecutionContext +import org.apache.linkis.engineconnplugin.flink.config.FlinkEnvConfiguration.{ + FLINK_APPLICATION_MAIN_CLASS_JAR, + FLINK_SHIP_DIRECTORIES +} +import org.apache.linkis.engineconnplugin.flink.resource.FlinkJobResourceCleaner + +import org.apache.commons.lang3.StringUtils + +import java.util + +/** + * Flink Job resource clean interceptor + */ +class FlinkJobResCleanInterceptor(cleaner: FlinkJobResourceCleaner) + extends FlinkJobSubmitInterceptor + with Logging { + + /** + * Before submit the job + * + * @param onceExecutorExecutionContext + * execution context + */ + override def beforeSubmit(onceExecutorExecutionContext: OnceExecutorExecutionContext): Unit = { + // Do nothing + + } + + /** + * After success to submit the job + * + * @param onceExecutorExecutionContext + * execution context + */ + override def afterSubmitSuccess( + onceExecutorExecutionContext: OnceExecutorExecutionContext + ): Unit = { + logger.info("Clean the flink job resource after success to submit") + cleanResources(onceExecutorExecutionContext.getEngineCreationContext.getOptions) + } + + /** + * After fail to submit the job + * + * @param onceExecutorExecutionContext + * execution context + * @param throwable + * throwable + */ + override def afterSubmitFail( + onceExecutorExecutionContext: OnceExecutorExecutionContext, + throwable: Throwable + ): Unit = { + 
logger.info("Clean the flink job resource after fail to submit") + cleanResources(onceExecutorExecutionContext.getEngineCreationContext.getOptions) + } + + private def cleanResources(options: util.Map[String, String]): Unit = { + val mainClassJar = FLINK_APPLICATION_MAIN_CLASS_JAR.getValue(options) + logger.trace(s"mainClassJar to clean: ${mainClassJar}") + if (StringUtils.isNotBlank(mainClassJar) && cleaner.accept(mainClassJar)) { + cleaner.cleanup(Array(mainClassJar)) + } + val shipDirsArray = FLINK_SHIP_DIRECTORIES.getValue(options).split(",") + logger.trace(s"Ship directories to clean: ${shipDirsArray.length}") + shipDirsArray match { + case resArray: Array[String] => cleaner.cleanup(resArray.filter(cleaner.accept)) + case _ => + } + } + +} diff --git a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/executor/interceptor/FlinkJobSubmitInterceptor.scala b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/executor/interceptor/FlinkJobSubmitInterceptor.scala new file mode 100644 index 0000000000..1c11cc9d0e --- /dev/null +++ b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/executor/interceptor/FlinkJobSubmitInterceptor.scala @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.flink.executor.interceptor + +import org.apache.linkis.engineconn.once.executor.OnceExecutorExecutionContext + +trait FlinkJobSubmitInterceptor { + + /** + * Before submit the job + * @param onceExecutorExecutionContext + * execution context + */ + def beforeSubmit(onceExecutorExecutionContext: OnceExecutorExecutionContext): Unit + + /** + * After success to submit the job + * @param onceExecutorExecutionContext + * execution context + */ + def afterSubmitSuccess(onceExecutorExecutionContext: OnceExecutorExecutionContext): Unit + + /** + * After fail to submit the job + * @param onceExecutorExecutionContext + * execution context + * @param throwable + * throwable + */ + def afterSubmitFail( + onceExecutorExecutionContext: OnceExecutorExecutionContext, + throwable: Throwable + ): Unit + +} diff --git a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/factory/FlinkApplicationExecutorFactory.scala b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/factory/FlinkApplicationExecutorFactory.scala index 1b8c2ad5b5..a5476edf88 100644 --- a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/factory/FlinkApplicationExecutorFactory.scala +++ b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/factory/FlinkApplicationExecutorFactory.scala @@ -23,12 +23,20 @@ import org.apache.linkis.engineconn.once.executor.OnceExecutor import org.apache.linkis.engineconn.once.executor.creation.OnceExecutorFactory import org.apache.linkis.engineconnplugin.flink.context.FlinkEngineConnContext import org.apache.linkis.engineconnplugin.flink.executor.FlinkJarOnceExecutor +import org.apache.linkis.engineconnplugin.flink.executor.interceptor.FlinkJobResCleanInterceptor +import 
org.apache.linkis.engineconnplugin.flink.resource.FlinkJobLocalResourceCleaner import org.apache.linkis.manager.label.entity.Label import org.apache.linkis.manager.label.entity.engine.RunType import org.apache.linkis.manager.label.entity.engine.RunType.RunType class FlinkApplicationExecutorFactory extends OnceExecutorFactory { + /** + * Interceptor + * @return + */ + private def submitInterceptor = new FlinkJobResCleanInterceptor(new FlinkJobLocalResourceCleaner) + override protected def newExecutor( id: Int, engineCreationContext: EngineCreationContext, @@ -36,7 +44,9 @@ class FlinkApplicationExecutorFactory extends OnceExecutorFactory { labels: Array[Label[_]] ): OnceExecutor = engineConn.getEngineConnSession match { case context: FlinkEngineConnContext => - new FlinkJarOnceExecutor(id, context) + val executor = new FlinkJarOnceExecutor(id, context) + executor.setSubmitInterceptor(submitInterceptor) + executor } override protected def getRunType: RunType = RunType.JAR diff --git a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/factory/FlinkEngineConnFactory.scala b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/factory/FlinkEngineConnFactory.scala index 599f87e69c..35961a652f 100644 --- a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/factory/FlinkEngineConnFactory.scala +++ b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/factory/FlinkEngineConnFactory.scala @@ -17,8 +17,11 @@ package org.apache.linkis.engineconnplugin.flink.factory +import org.apache.linkis.common.conf.CommonVars import org.apache.linkis.common.utils.{ClassUtils, Logging} +import org.apache.linkis.engineconn.acessible.executor.conf.AccessibleExecutorConfiguration import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconn.launch.EngineConnServer import 
org.apache.linkis.engineconnplugin.flink.client.config.Environment import org.apache.linkis.engineconnplugin.flink.client.config.entries.ExecutionEntry import org.apache.linkis.engineconnplugin.flink.client.context.ExecutionContext @@ -29,7 +32,8 @@ import org.apache.linkis.engineconnplugin.flink.context.{EnvironmentContext, Fli import org.apache.linkis.engineconnplugin.flink.errorcode.FlinkErrorCodeSummary._ import org.apache.linkis.engineconnplugin.flink.exception.FlinkInitFailedException import org.apache.linkis.engineconnplugin.flink.setting.Settings -import org.apache.linkis.engineconnplugin.flink.util.ClassUtil +import org.apache.linkis.engineconnplugin.flink.util.{ClassUtil, ManagerUtil} +import org.apache.linkis.governance.common.conf.GovernanceCommonConf import org.apache.linkis.manager.engineplugin.common.conf.EnvConfiguration import org.apache.linkis.manager.engineplugin.common.creation.{ ExecutorFactory, @@ -38,6 +42,7 @@ import org.apache.linkis.manager.engineplugin.common.creation.{ import org.apache.linkis.manager.label.entity.Label import org.apache.linkis.manager.label.entity.engine._ import org.apache.linkis.manager.label.entity.engine.EngineType.EngineType +import org.apache.linkis.protocol.utils.TaskUtils import org.apache.commons.lang3.StringUtils import org.apache.flink.configuration._ @@ -102,6 +107,16 @@ class FlinkEngineConnFactory extends MultiExecutorEngineConnFactory with Logging val providedLibDirsArray = FLINK_LIB_LOCAL_PATH.getValue(options).split(",") // Ship directories val shipDirsArray = getShipDirectories(options) + // other params + val flinkClientType = GovernanceCommonConf.EC_APP_MANAGE_MODE.getValue(options) + val otherParams = new util.HashMap[String, Any]() + val isManager = ManagerUtil.isManager + if (isManager) { + logger.info( + s"support parallelism : ${AccessibleExecutorConfiguration.ENGINECONN_SUPPORT_PARALLELISM.getHotValue()}" + ) + } + otherParams.put(GovernanceCommonConf.EC_APP_MANAGE_MODE.key, 
flinkClientType.toLowerCase()) val context = new EnvironmentContext( defaultEnv, new Configuration, @@ -112,7 +127,8 @@ class FlinkEngineConnFactory extends MultiExecutorEngineConnFactory with Logging flinkProvidedLibPath, providedLibDirsArray, shipDirsArray, - new util.ArrayList[URL] + new util.ArrayList[URL], + otherParams ) // Step1: environment-level configurations val jobName = options.getOrDefault("flink.app.name", "EngineConn-Flink") @@ -391,8 +407,14 @@ class FlinkEngineConnFactory extends MultiExecutorEngineConnFactory with Logging ): FlinkEngineConnContext = new FlinkEngineConnContext(environmentContext) - override protected def getDefaultExecutorFactoryClass: Class[_ <: ExecutorFactory] = - classOf[FlinkCodeExecutorFactory] + override protected def getDefaultExecutorFactoryClass: Class[_ <: ExecutorFactory] = { + val options = EngineConnServer.getEngineCreationContext.getOptions + if (FlinkEnvConfiguration.FLINK_MANAGER_MODE_CONFIG_KEY.getValue(options)) { + classOf[FlinkManagerExecutorFactory] + } else { + classOf[FlinkCodeExecutorFactory] + } + } override protected def getEngineConnType: EngineType = EngineType.FLINK @@ -400,7 +422,8 @@ class FlinkEngineConnFactory extends MultiExecutorEngineConnFactory with Logging ClassUtil.getInstance(classOf[FlinkSQLExecutorFactory], new FlinkSQLExecutorFactory), ClassUtil .getInstance(classOf[FlinkApplicationExecutorFactory], new FlinkApplicationExecutorFactory), - ClassUtil.getInstance(classOf[FlinkCodeExecutorFactory], new FlinkCodeExecutorFactory) + ClassUtil.getInstance(classOf[FlinkCodeExecutorFactory], new FlinkCodeExecutorFactory), + ClassUtil.getInstance(classOf[FlinkManagerExecutorFactory], new FlinkManagerExecutorFactory) ) override def getExecutorFactories: Array[ExecutorFactory] = executorFactoryArray diff --git a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/factory/FlinkManagerExecutorFactory.scala 
b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/factory/FlinkManagerExecutorFactory.scala new file mode 100644 index 0000000000..6bdf432975 --- /dev/null +++ b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/factory/FlinkManagerExecutorFactory.scala @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconnplugin.flink.factory + +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconn.common.engineconn.EngineConn +import org.apache.linkis.engineconn.computation.executor.creation.ComputationExecutorFactory +import org.apache.linkis.engineconn.computation.executor.execute.ComputationExecutor +import org.apache.linkis.engineconn.once.executor.OnceExecutor +import org.apache.linkis.engineconn.once.executor.creation.OnceExecutorFactory +import org.apache.linkis.engineconnplugin.flink.config.FlinkEnvConfiguration +import org.apache.linkis.engineconnplugin.flink.context.FlinkEngineConnContext +import org.apache.linkis.engineconnplugin.flink.executor.{ + FlinkCodeOnceExecutor, + FlinkManagerConcurrentExecutor +} +import org.apache.linkis.engineconnplugin.flink.factory.FlinkManagerExecutorFactory.setDefaultExecutor +import org.apache.linkis.manager.label.entity.Label +import org.apache.linkis.manager.label.entity.engine.RunType._ + +class FlinkManagerExecutorFactory extends OnceExecutorFactory { + + override protected def newExecutor( + id: Int, + engineCreationContext: EngineCreationContext, + engineConn: EngineConn, + labels: Array[Label[_]] + ): OnceExecutor = engineConn.getEngineConnSession match { + case flinkEngineConnContext: FlinkEngineConnContext => + val executor = new FlinkManagerConcurrentExecutor( + id, + FlinkEnvConfiguration.FLINK_MANAGER_LOAD_TASK_MAX.getValue, + flinkEngineConnContext + ) + setDefaultExecutor(executor) + executor + } + + // just set lots of runType, but now only sql is supported. 
+ override protected def getSupportRunTypes: Array[String] = + Array(JSON.toString) + + override protected def getRunType: RunType = JSON +} + +object FlinkManagerExecutorFactory { + + private var defaultExecutor: FlinkManagerConcurrentExecutor = _ + + def setDefaultExecutor(executor: FlinkManagerConcurrentExecutor): Unit = { + defaultExecutor = executor + } + + def getDefaultExecutor(): FlinkManagerConcurrentExecutor = defaultExecutor + +} diff --git a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/hook/EngineLoadOperationHook.scala b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/hook/EngineLoadOperationHook.scala new file mode 100644 index 0000000000..78ae653077 --- /dev/null +++ b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/hook/EngineLoadOperationHook.scala @@ -0,0 +1,112 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconnplugin.flink.hook + +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.engineconn.acessible.executor.entity.AccessibleExecutor +import org.apache.linkis.engineconn.acessible.executor.hook.OperationHook +import org.apache.linkis.engineconn.core.executor.ExecutorManager +import org.apache.linkis.engineconnplugin.flink.config.FlinkEnvConfiguration +import org.apache.linkis.engineconnplugin.flink.factory.FlinkManagerExecutorFactory +import org.apache.linkis.manager.common.entity.enumeration.NodeStatus +import org.apache.linkis.manager.common.protocol.engine.{ + EngineOperateRequest, + EngineOperateResponse +} + +import org.springframework.stereotype.Service + +import javax.annotation.PostConstruct + +import java.util.concurrent.atomic.AtomicInteger + +import scala.collection.mutable + +@Service +class EngineLoadOperationHook extends OperationHook with Logging { + + @PostConstruct + private def init(): Unit = { + OperationHook.registerOperationHook(this) + logger.info(s"${getName()} init success.") + } + + private val taskNum = new AtomicInteger(0) + private val lock = new Object + + override def getName(): String = getClass.getSimpleName + + override def doPreOperation( + engineOperateRequest: EngineOperateRequest, + engineOperateResponse: EngineOperateResponse + ): Unit = { + ExecutorManager.getInstance.getReportExecutor match { + case accessibleExecutor: AccessibleExecutor => + accessibleExecutor.updateLastActivityTime() + case _ => + } + if ( + taskNum.incrementAndGet() >= FlinkEnvConfiguration.FLINK_MANAGER_LOAD_TASK_MAX.getHotValue() + ) { + lock.synchronized { + if ( + taskNum + .incrementAndGet() >= FlinkEnvConfiguration.FLINK_MANAGER_LOAD_TASK_MAX.getHotValue() + ) { + FlinkManagerExecutorFactory.getDefaultExecutor() match { + case accessibleExecutor: AccessibleExecutor => + if (NodeStatus.Busy != accessibleExecutor.getStatus) { + accessibleExecutor.transition(NodeStatus.Busy) + logger.warn("The 
number of tasks exceeds the maximum limit, change status to busy.") + } + case _ => logger.error("FlinkManagerExecutorFactory.getDefaultExecutor() is None.") + } + } + } + } + } + + override def doPostOperation( + engineOperateRequest: EngineOperateRequest, + engineOperateResponse: EngineOperateResponse + ): Unit = { + if (taskNum.get() - 1 < FlinkEnvConfiguration.FLINK_MANAGER_LOAD_TASK_MAX.getHotValue()) { + lock.synchronized { + if ( + taskNum + .decrementAndGet() < FlinkEnvConfiguration.FLINK_MANAGER_LOAD_TASK_MAX.getHotValue() + ) { + FlinkManagerExecutorFactory.getDefaultExecutor() match { + case accessibleExecutor: AccessibleExecutor => + if (NodeStatus.Busy == accessibleExecutor.getStatus) { + accessibleExecutor.transition(NodeStatus.Unlock) + logger.warn( + "The number of tasks is less than the maximum limit, change status to unlock." + ) + } + case _ => logger.error("FlinkManagerExecutorFactory.getDefaultExecutor() is None.") + } + } + } + } + if (logger.isDebugEnabled()) { + logger.debug(s"taskNum: ${taskNum.get()}") + } + } + +} diff --git a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/launch/FlinkEngineConnLaunchBuilder.scala b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/launch/FlinkEngineConnLaunchBuilder.scala index cbcae4ea54..8afe86b976 100644 --- a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/launch/FlinkEngineConnLaunchBuilder.scala +++ b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/launch/FlinkEngineConnLaunchBuilder.scala @@ -24,12 +24,22 @@ import org.apache.linkis.hadoop.common.conf.HadoopConf import org.apache.linkis.manager.common.protocol.bml.BmlResource import org.apache.linkis.manager.engineplugin.common.conf.EnvConfiguration import org.apache.linkis.manager.engineplugin.common.launch.entity.EngineConnBuildRequest -import 
org.apache.linkis.manager.engineplugin.common.launch.process.Environment.{variable, USER} -import org.apache.linkis.manager.engineplugin.common.launch.process.JavaProcessEngineConnLaunchBuilder +import org.apache.linkis.manager.engineplugin.common.launch.process.{ + Environment, + JavaProcessEngineConnLaunchBuilder +} +import org.apache.linkis.manager.engineplugin.common.launch.process.Environment.{ + variable, + PWD, + USER +} +import org.apache.linkis.manager.engineplugin.common.launch.process.LaunchConstants.{ + addPathToClassPath, + CLASS_PATH_SEPARATOR +} import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel import java.util - import scala.collection.JavaConverters._ class FlinkEngineConnLaunchBuilder extends JavaProcessEngineConnLaunchBuilder { @@ -81,6 +91,31 @@ class FlinkEngineConnLaunchBuilder extends JavaProcessEngineConnLaunchBuilder { } bmlResources } + override def getEnvironment(implicit + engineConnBuildRequest: EngineConnBuildRequest + ): util.Map[String, String] = { + val environment = new util.HashMap[String, String] + addPathToClassPath(environment, variable(PWD)) + val linkisEnvironment = super.getEnvironment + val linkisClassPath = linkisEnvironment.get(Environment.CLASSPATH.toString) + val v = environment.get(Environment.CLASSPATH.toString) + CLASS_PATH_SEPARATOR + linkisClassPath + environment.put(Environment.CLASSPATH.toString, v) + logger.info(environment.asScala.map(e => s"${e._1}->${e._2}").mkString(",")) + environment + } private def contentToBmlResource(userName: String, content: String): BmlResource = { val contentMap = JsonUtils.jackson.readValue(content, classOf[util.Map[String, Object]]) diff --git a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/operator/KillOperator.scala b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/operator/KillOperator.scala new file mode 100644 index 0000000000..8bfbcad0a3 --- /dev/null +++ b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/operator/KillOperator.scala @@ -0,0 +1,84 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.linkis.engineconnplugin.flink.operator + +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconnplugin.flink.constants.FlinkECConstant +import org.apache.linkis.engineconnplugin.flink.operator.clientmanager.FlinkRestClientManager +import org.apache.linkis.engineconnplugin.flink.util.YarnUtil +import org.apache.linkis.engineconnplugin.flink.util.YarnUtil.logAndException +import org.apache.linkis.governance.common.constant.ec.ECConstants +import org.apache.linkis.manager.common.operator.Operator + +import org.apache.hadoop.yarn.api.records.{ApplicationId, FinalApplicationStatus} + +import scala.collection.JavaConverters.collectionAsScalaIterableConverter +import scala.collection.mutable + +class KillOperator extends Operator with Logging { + + override def getNames: Array[String] = Array("kill") + + override def apply(implicit params: Map[String, Any]): Map[String, Any] = { + + val rsMap = new mutable.HashMap[String, String] + val appIdStr = params.getOrElse(ECConstants.YARN_APPID_NAME_KEY, "").asInstanceOf[String] + val snapShot = params.getOrElse(FlinkECConstant.SNAPHOT_KEY, "false").toString.toBoolean + + val appId: ApplicationId = YarnUtil.retrieveApplicationId(appIdStr) + + var isStopped = false + val restClient = + Utils.tryCatch { + FlinkRestClientManager.getFlinkRestClient(appIdStr) + } { case e: Exception => + val yarnClient = YarnUtil.getYarnClient() + val appReport = yarnClient.getApplicationReport(appId) + if (appReport.getFinalApplicationStatus != FinalApplicationStatus.UNDEFINED) { + // Flink cluster is not running anymore + val msg = + s"The application ${appIdStr} doesn't run anymore. 
It has previously completed with final status: ${appReport.getFinalApplicationStatus.toString}" + logAndException(msg) + isStopped = true + null + } else { + val msg = s"Get client for app ${appIdStr} failed, because : ${e.getMessage}" + throw logAndException(msg) + } + } + if (!isStopped) { + if (snapShot) { + val checkPointPath = + params.getOrElse(FlinkECConstant.SAVAPOINT_PATH_KEY, null).asInstanceOf[String] + val rs = YarnUtil.triggerSavepoint(appIdStr, checkPointPath, restClient) + rsMap.put(FlinkECConstant.MSG_KEY, rs) + } + val jobs = restClient.listJobs().get() + if (null == jobs || jobs.isEmpty) { + val msg = s"App : ${appIdStr} have no jobs, but is not ended." + throw logAndException(msg) + } + val msg = s"Try to kill ${jobs.size()} jobs of app : ${appIdStr}" + jobs.asScala.foreach(job => restClient.cancel(job.getJobId)) + rsMap += (FlinkECConstant.MSG_KEY -> msg) + } + + rsMap.toMap[String, String] + } + +} diff --git a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/operator/ListOperator.scala b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/operator/ListOperator.scala new file mode 100644 index 0000000000..1befe48276 --- /dev/null +++ b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/operator/ListOperator.scala @@ -0,0 +1,112 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.flink.operator + +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.common.exception.EngineConnException +import org.apache.linkis.engineconnplugin.flink.util.YarnUtil +import org.apache.linkis.governance.common.constant.ec.ECConstants +import org.apache.linkis.governance.common.exception.engineconn.EngineConnExecutorErrorCode +import org.apache.linkis.manager.common.operator.Operator +import org.apache.linkis.server.BDPJettyServerHelper + +import org.apache.hadoop.yarn.api.records.{FinalApplicationStatus, YarnApplicationState} + +import java.util + +import scala.collection.JavaConverters.asScalaBufferConverter +import scala.collection.mutable + +class ListOperator extends Operator with Logging { + + private val json = BDPJettyServerHelper.jacksonJson + + override def getNames: Array[String] = Array("list") + + override def apply(implicit params: Map[String, Any]): Map[String, Any] = { + + val applicationTypeSet = new util.HashSet[String]() + var appStateSet = util.EnumSet.of[YarnApplicationState](YarnApplicationState.RUNNING) + var appName = "" + + Utils.tryCatch { + val appTypeList = params + .getOrElse(ECConstants.YARN_APP_TYPE_LIST_KEY, new util.ArrayList[String]()) + .asInstanceOf[util.List[String]] + appTypeList.asScala.foreach(applicationTypeSet.add) + val appStateList = params + .getOrElse(ECConstants.YARN_APP_STATE_LIST_KEY, new util.ArrayList[String]()) + .asInstanceOf[util.List[String]] + val appStateArray = new util.HashSet[YarnApplicationState] + 
appStateList.asScala.foreach(e => appStateArray.add(YarnApplicationState.valueOf(e))) + if (!appStateArray.isEmpty) { + appStateSet = util.EnumSet.copyOf(appStateArray) + } + appName = params.getOrElse(ECConstants.YARN_APP_NAME_KEY, "").asInstanceOf[String] + } { e: Throwable => + val msg = "Invalid params. " + e.getMessage + logger.error(msg, e) + throw new EngineConnException(EngineConnExecutorErrorCode.INVALID_PARAMS, msg) + } + + val yarnClient = YarnUtil.getYarnClient() + val appList = yarnClient.getApplications(applicationTypeSet, appStateSet) + val rsMap = new mutable.HashMap[String, String] + Utils.tryCatch { + val appTypeStr = json.writeValueAsString(applicationTypeSet) + val appStateStr = json.writeValueAsString(appStateSet) + val rsAppList = new util.ArrayList[util.Map[String, String]]() + appList.asScala.foreach(report => { + if (report.getName.contains(appName)) { + val tmpMap = new util.HashMap[String, String]() + tmpMap.put(ECConstants.YARN_APP_NAME_KEY, report.getName) + tmpMap.put(ECConstants.YARN_APP_TYPE_KEY, report.getApplicationType) + tmpMap.put(ECConstants.YARN_APPID_NAME_KEY, report.getApplicationId.toString) + tmpMap.put(ECConstants.YARN_APP_URL_KEY, report.getTrackingUrl) + val appStatus = + if (report.getFinalApplicationStatus != FinalApplicationStatus.UNDEFINED) { + report.getFinalApplicationStatus + } else { + report.getYarnApplicationState + } + tmpMap.put( + ECConstants.NODE_STATUS_KEY, + YarnUtil + .convertYarnStateToNodeStatus(report.getApplicationId.toString, appStatus.toString) + .toString + ) + rsAppList.add(tmpMap) + } + }) + val listStr = json.writeValueAsString(rsAppList) + + logger.info( + s"List yarn apps, params : appTypeSet : ${appTypeStr}, appStateSet : ${appStateStr}, list : ${listStr}" + ) + + rsMap += (ECConstants.YARN_APP_RESULT_LIST_KEY -> listStr) + } { case e: Exception => + val msg = "convert listStr failed. 
Because : " + e.getMessage + logger.error(msg) + throw e + } + + rsMap.toMap[String, String] + } + +} diff --git a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/operator/StatusOperator.scala b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/operator/StatusOperator.scala new file mode 100644 index 0000000000..02f5bb35e7 --- /dev/null +++ b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/operator/StatusOperator.scala @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconnplugin.flink.operator + +import org.apache.linkis.common.exception.{LinkisException, LinkisRuntimeException} +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.common.exception.EngineConnException +import org.apache.linkis.engineconnplugin.flink.util.{ManagerUtil, YarnUtil} +import org.apache.linkis.engineconnplugin.flink.util.YarnUtil.logAndException +import org.apache.linkis.governance.common.constant.ec.ECConstants +import org.apache.linkis.governance.common.exception.engineconn.EngineConnExecutorErrorCode +import org.apache.linkis.manager.common.entity.enumeration.NodeStatus +import org.apache.linkis.manager.common.operator.Operator + +import org.apache.commons.lang3.StringUtils +import org.apache.hadoop.yarn.api.records.{ApplicationId, ApplicationReport, FinalApplicationStatus} +import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException + +import scala.collection.mutable + +class StatusOperator extends Operator with Logging { + + override def getNames: Array[String] = Array("status") + + override def apply(implicit params: Map[String, Any]): Map[String, Any] = { + + val appIdStr = params.getOrElse(ECConstants.YARN_APPID_NAME_KEY, "").asInstanceOf[String] + + val parts = appIdStr.split("_") + val clusterTimestamp = parts(1).toLong + val sequenceNumber = parts(2).toInt + + // Create an ApplicationId object using newInstance method + val appId = ApplicationId.newInstance(clusterTimestamp, sequenceNumber) + val rsMap = new mutable.HashMap[String, String] + + val yarnClient = YarnUtil.getYarnClient() + var appReport: ApplicationReport = null + Utils.tryCatch { + appReport = yarnClient.getApplicationReport(appId) + if (null == appReport) { + throw logAndException(s"Got null appReport for appid : ${appIdStr}") + } + } { case notExist: ApplicationNotFoundException => + logger.error(s"Application : ${appIdStr} not exists, will set the status to failed.") + rsMap += 
(ECConstants.NODE_STATUS_KEY -> NodeStatus.Failed.toString) + rsMap += (ECConstants.YARN_APPID_NAME_KEY -> appIdStr) + return rsMap.toMap[String, String] + } + + // Get the application status (YarnApplicationState) + val appStatus = if (appReport.getFinalApplicationStatus != FinalApplicationStatus.UNDEFINED) { + appReport.getFinalApplicationStatus + } else { + appReport.getYarnApplicationState + } + + val nodeStatus: NodeStatus = YarnUtil.convertYarnStateToNodeStatus(appIdStr, appStatus.toString) + + logger.info(s"try to get appid: ${appIdStr}, status ${nodeStatus.toString}.") + rsMap += (ECConstants.NODE_STATUS_KEY -> nodeStatus.toString) + rsMap += (ECConstants.YARN_APPID_NAME_KEY -> appIdStr) + rsMap.toMap[String, String] + } + +} + +object StatusOperator extends Logging { + + private var handshaked: Boolean = false + + def addHandshake(): Unit = { + handshaked = true + } + + def isHandshaked: Boolean = handshaked + +} diff --git a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/operator/TriggerSavepointOperator.scala b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/operator/TriggerSavepointOperator.scala index 5be53243b6..0109bb42fa 100644 --- a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/operator/TriggerSavepointOperator.scala +++ b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/operator/TriggerSavepointOperator.scala @@ -18,32 +18,72 @@ package org.apache.linkis.engineconnplugin.flink.operator import org.apache.linkis.common.utils.Logging +import org.apache.linkis.engineconn.launch.EngineConnServer import org.apache.linkis.engineconn.once.executor.creation.OnceExecutorManager +import org.apache.linkis.engineconnplugin.flink.constants.FlinkECConstant import org.apache.linkis.engineconnplugin.flink.errorcode.FlinkErrorCodeSummary._ import 
org.apache.linkis.engineconnplugin.flink.exception.JobExecutionException import org.apache.linkis.engineconnplugin.flink.executor.FlinkOnceExecutor +import org.apache.linkis.engineconnplugin.flink.operator.clientmanager.FlinkRestClientManager +import org.apache.linkis.engineconnplugin.flink.util.YarnUtil +import org.apache.linkis.engineconnplugin.flink.util.YarnUtil.logAndException +import org.apache.linkis.governance.common.constant.ec.ECConstants import org.apache.linkis.manager.common.operator.Operator +import org.apache.hadoop.yarn.api.records.FinalApplicationStatus + import java.text.MessageFormat +import java.util + +import scala.collection.mutable class TriggerSavepointOperator extends Operator with Logging { override def getNames: Array[String] = Array("doSavepoint") - override def apply(implicit parameters: Map[String, Any]): Map[String, Any] = { - val savepoint = getAsThrow[String]("savepointPath") - val mode = getAsThrow[String]("mode") - logger.info(s"try to $mode savepoint with path $savepoint.") - OnceExecutorManager.getInstance.getReportExecutor match { - case flinkExecutor: FlinkOnceExecutor[_] => - val writtenSavepoint = - flinkExecutor.getClusterDescriptorAdapter.doSavepoint(savepoint, mode) - Map("writtenSavepoint" -> writtenSavepoint) - case executor => - throw new JobExecutionException( - MessageFormat.format(NOT_SUPPORT_SAVEPOTION.getErrorDesc, executor.getClass.getSimpleName) + override def apply(implicit params: Map[String, Any]): Map[String, Any] = { + val rsMap = new mutable.HashMap[String, String] + + val savepointPath = getAsThrow[String](FlinkECConstant.SAVAPOINT_PATH_KEY) + val appIdStr = getAsThrow[String](ECConstants.YARN_APPID_NAME_KEY) + val mode = getAsThrow[String](FlinkECConstant.SAVEPOINT_MODE_KEY) + + val appId = YarnUtil.retrieveApplicationId(appIdStr) + val yarnClient = YarnUtil.getYarnClient() + val appReport = yarnClient.getApplicationReport(appId) + if (appReport.getFinalApplicationStatus != 
FinalApplicationStatus.UNDEFINED) { + // Flink cluster is not running anymore + val msg = + s"The application ${appIdStr} doesn't run anymore. It has previously completed with final status: ${appReport.getFinalApplicationStatus.toString}" + throw logAndException(msg) + } + + logger.info(s"try to $mode savepoint with path $savepointPath.") + if ( + YarnUtil.isDetach( + EngineConnServer.getEngineCreationContext.getOptions.asInstanceOf[util.Map[String, Any]] ) + ) { + logger.info("The flink cluster is detached, use rest api to trigger savepoint.") + val restClient = FlinkRestClientManager.getFlinkRestClient(appIdStr) + val rs = YarnUtil.triggerSavepoint(appIdStr, savepointPath, restClient) + rsMap.put(FlinkECConstant.RESULT_SAVEPOINT_KEY, rs) + } else { + logger.info("The flink cluster is not detached, use flink client to trigger savepoint.") + OnceExecutorManager.getInstance.getReportExecutor match { + case flinkExecutor: FlinkOnceExecutor[_] => + val writtenSavepoint = + flinkExecutor.getClusterDescriptorAdapter.doSavepoint(savepointPath, mode) + rsMap.put(FlinkECConstant.RESULT_SAVEPOINT_KEY, writtenSavepoint) + case executor => + throw new JobExecutionException( + NOT_SUPPORT_SAVEPOTION.getErrorDesc + executor.getClass.getSimpleName + + MessageFormat + .format(NOT_SUPPORT_SAVEPOTION.getErrorDesc, executor.getClass.getSimpleName) + ) + } } + rsMap.toMap } } diff --git a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/operator/clientmanager/FlinkRestClientManager.scala b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/operator/clientmanager/FlinkRestClientManager.scala new file mode 100644 index 0000000000..d7a14beb44 --- /dev/null +++ b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/operator/clientmanager/FlinkRestClientManager.scala @@ -0,0 +1,97 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor 
license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.flink.operator.clientmanager + +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.engineconnplugin.flink.config.FlinkEnvConfiguration +import org.apache.linkis.engineconnplugin.flink.executor.FlinkManagerConcurrentExecutor +import org.apache.linkis.engineconnplugin.flink.factory.FlinkManagerExecutorFactory +import org.apache.linkis.engineconnplugin.flink.util.YarnUtil +import org.apache.linkis.engineconnplugin.flink.util.YarnUtil.logAndException + +import org.apache.flink.client.program.rest.RestClusterClient +import org.apache.flink.configuration.Configuration +import org.apache.hadoop.yarn.api.records.{ApplicationId, FinalApplicationStatus} + +import java.util.concurrent.TimeUnit + +import com.google.common.cache.{ + CacheBuilder, + CacheLoader, + LoadingCache, + RemovalListener, + RemovalNotification +} + +object FlinkRestClientManager extends Logging { + + private lazy val restclientCache + : LoadingCache[String, RestClusterClient[ApplicationId]] = CacheBuilder + .newBuilder() + .maximumSize(FlinkEnvConfiguration.FLINK_MANAGER_CLIENT_MAX_NUM.getValue) + .expireAfterAccess( + FlinkEnvConfiguration.FLINK_MANAGER_CLIENT_EXPIRE_MILLS.getValue, + TimeUnit.MILLISECONDS + ) + .weakKeys() + 
.removalListener(new RemovalListener[String, RestClusterClient[ApplicationId]]() { + + override def onRemoval( + notification: RemovalNotification[String, RestClusterClient[ApplicationId]] + ): Unit = { + logger.info(s"RestClusterClient of AppId : ${notification.getKey} was removed.") + } + + }) + .build(new CacheLoader[String, RestClusterClient[ApplicationId]]() { + + override def load(appIdStr: String): RestClusterClient[ApplicationId] = { + + val appId: ApplicationId = YarnUtil.retrieveApplicationId(appIdStr) + + val yarnClient = YarnUtil.getYarnClient() + val appReport = yarnClient.getApplicationReport(appId) + + if (appReport.getFinalApplicationStatus != FinalApplicationStatus.UNDEFINED) { + // Flink cluster is not running anymore + val msg = + s"The application ${appIdStr} doesn't run anymore. It has previously completed with final status: ${appReport.getFinalApplicationStatus.toString}" + throw logAndException(msg) + } + + val executor = FlinkManagerExecutorFactory.getDefaultExecutor() + val tmpFlinkConf: Configuration = executor match { + case flinkManagerExecutor: FlinkManagerConcurrentExecutor => + flinkManagerExecutor.getFlinkContext().getEnvironmentContext.getFlinkConfig.clone() + case _ => + val msg = s"Invalid FlinkManagerConcurrentExecutor : ${executor}" + throw logAndException(msg) + } + YarnUtil.setClusterEntrypointInfoToConfig(tmpFlinkConf, appReport) + new RestClusterClient[ApplicationId](tmpFlinkConf, appReport.getApplicationId) + } + + }) + + def getFlinkRestClient(appIdStr: String): RestClusterClient[ApplicationId] = + restclientCache.get(appIdStr) + + def setFlinkRestClient(appIdStr: String, client: RestClusterClient[ApplicationId]): Unit = + restclientCache.put(appIdStr, client) + +} diff --git a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/resource/FlinkJobLocalResourceCleaner.scala 
b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/resource/FlinkJobLocalResourceCleaner.scala new file mode 100644 index 0000000000..b849d6967d --- /dev/null +++ b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/resource/FlinkJobLocalResourceCleaner.scala @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconnplugin.flink.resource + +import org.apache.linkis.common.utils.Logging + +import java.io.File + +/** + * Cleaner for local resource + */ +class FlinkJobLocalResourceCleaner extends FlinkJobResourceCleaner with Logging { + + /** + * Clean up path array + * + * @param resArray + * resource array + */ + override def cleanup(resArray: Array[String]): Unit = { + Option(resArray).foreach(paths => + paths.foreach(path => { + Option(getClass.getClassLoader.getResource(path)) match { + case Some(url) => + val localFile = new File(url.getPath) + if (localFile.exists()) { + logger.info(s"Clean the resource: [${localFile.getPath}]") + localFile.delete() + } + case _ => + } + + }) + ) + } + + /** + * If accept the path + * + * @param resource + * path + * @return + */ + override def accept(resource: String): Boolean = { + Option(getClass.getClassLoader.getResource(resource)) match { + case Some(url) => + val file = new File(url.getPath) + // Avoid the linked file/directory + file.getCanonicalFile.equals(file.getAbsoluteFile) + case _ => false + } + } + +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/ResultAccessibleJob.java b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/resource/FlinkJobResourceCleaner.scala similarity index 69% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/ResultAccessibleJob.java rename to linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/resource/FlinkJobResourceCleaner.scala index 564ed03e10..76d4fe8552 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/ResultAccessibleJob.java +++ 
b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/resource/FlinkJobResourceCleaner.scala @@ -15,15 +15,24 @@ * limitations under the License. */ -package org.apache.linkis.cli.core.interactor.job; +package org.apache.linkis.engineconnplugin.flink.resource -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.entity.job.JobData; +trait FlinkJobResourceCleaner { -public interface ResultAccessibleJob extends Job { /** - * Retrieve reault-set. This methods should be non-blocking and result should be appended into - * {@link JobData} + * Clean up resource array + * + * @param resArray + * resource array */ - void startRetrieveResult(); + def cleanup(resArray: Array[String]): Unit + + /** + * If accept the resource + * + * @param resource + * resource + * @return + */ + def accept(resource: String): Boolean } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/operator/OperatorUtilsTest.java b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/util/ManagerUtil.scala similarity index 63% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/operator/OperatorUtilsTest.java rename to linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/util/ManagerUtil.scala index 53dd359148..a4dec1cf82 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/test/java/org/apache/linkis/cli/application/operator/OperatorUtilsTest.java +++ b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/util/ManagerUtil.scala @@ -15,21 +15,19 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.application.operator; +package org.apache.linkis.engineconnplugin.flink.util -import org.junit.jupiter.api.Assertions; -import org.junit.jupiter.api.DisplayName; -import org.junit.jupiter.api.Test; +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.engineconn.launch.EngineConnServer +import org.apache.linkis.engineconnplugin.flink.config.FlinkEnvConfiguration -public class OperatorUtilsTest { +import java.util - @Test - @DisplayName("getNumOfLinesTest") - public void getNumOfLinesTest() { +object ManagerUtil extends Logging { - String str = "name, \n" + "names"; - int numOfLines = OperatorUtils.getNumOfLines(str); - - Assertions.assertTrue(2 == numOfLines); + val isManager: Boolean = { + val options = EngineConnServer.getEngineCreationContext.getOptions + FlinkEnvConfiguration.FLINK_MANAGER_MODE_CONFIG_KEY.getValue(options) } + } diff --git a/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/util/YarnUtil.scala b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/util/YarnUtil.scala new file mode 100644 index 0000000000..a1c96619e6 --- /dev/null +++ b/linkis-engineconn-plugins/flink/src/main/scala/org/apache/linkis/engineconnplugin/flink/util/YarnUtil.scala @@ -0,0 +1,245 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.flink.util + +import org.apache.linkis.common.exception.ErrorException +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.engineconn.core.executor.ExecutorManager +import org.apache.linkis.engineconn.executor.entity.YarnExecutor +import org.apache.linkis.engineconnplugin.flink.config.FlinkEnvConfiguration +import org.apache.linkis.engineconnplugin.flink.exception.JobExecutionException +import org.apache.linkis.governance.common.conf.GovernanceCommonConf +import org.apache.linkis.governance.common.constant.ec.ECConstants +import org.apache.linkis.manager.common.entity.enumeration.NodeStatus + +import org.apache.commons.lang3.StringUtils +import org.apache.flink +import org.apache.flink.client.program.rest.RestClusterClient +import org.apache.flink.configuration.{HighAvailabilityOptions, JobManagerOptions, RestOptions} +import org.apache.flink.runtime.client.JobStatusMessage +import org.apache.flink.yarn.configuration.YarnConfigOptions +import org.apache.hadoop.conf.Configuration +import org.apache.hadoop.yarn.api.records.{ + ApplicationId, + ApplicationReport, + FinalApplicationStatus, + YarnApplicationState +} +import org.apache.hadoop.yarn.client.api.YarnClient +import org.apache.hadoop.yarn.conf.YarnConfiguration +import org.apache.hadoop.yarn.util.ConverterUtils + +import java.util + +import scala.collection.JavaConverters.collectionAsScalaIterableConverter +import scala.collection.mutable.ArrayBuffer + +object YarnUtil extends Logging { + + val CORE_SITE = "core-site.xml" + 
val YARN_SITE = "yarn-site.xml" + val HDFS_SITE = "hdfs-site.xml" + val MAPRED_SITE = "mapred-site.xml" + + private var yarnClient: YarnClient = _ + + def getYarnClient(): YarnClient = { + if (null == yarnClient) { + YarnUtil.getClass.synchronized { + if (null == yarnClient) { + yarnClient = createYarnClient() + } + } + } + yarnClient + } + + private def createYarnClient(): YarnClient = { + val yarnClient = YarnClient.createYarnClient() + val hadoopConf = getHadoopConf() + val yarnConfiguration = new YarnConfiguration(hadoopConf) + yarnClient.init(yarnConfiguration) + yarnClient.start() + yarnClient + } + + private def getHadoopConf(): Configuration = { + val conf = new Configuration() + var confRoot = FlinkEnvConfiguration.HADOOP_CONF_DIR.getValue + if (StringUtils.isBlank(confRoot)) { + throw new JobExecutionException("HADOOP_CONF_DIR or linkis.flink.hadoop.conf.dir not set!") + } + confRoot = confRoot + "/" + conf.addResource(confRoot + HDFS_SITE) + conf.addResource(confRoot + CORE_SITE) + conf.addResource(confRoot + MAPRED_SITE) + conf.addResource(confRoot + YARN_SITE) + conf + } + + def setClusterEntrypointInfoToConfig( + flinkConfiguration: flink.configuration.Configuration, + appReport: ApplicationReport + ): Unit = { + if (null == appReport) { + val msg = "Invalid null appReport" + logger.error(msg) + throw new JobExecutionException(msg) + } + + val appId = appReport.getApplicationId + val host = appReport.getHost + val port = appReport.getRpcPort + + logger.info(s"Found Web Interface ${host}:${port} of application '${appId}'.") + + flinkConfiguration.setString(JobManagerOptions.ADDRESS, host) + flinkConfiguration.setInteger(JobManagerOptions.PORT, port) + + flinkConfiguration.setString(RestOptions.ADDRESS, host) + flinkConfiguration.setInteger(RestOptions.PORT, port) + + flinkConfiguration.set(YarnConfigOptions.APPLICATION_ID, ConverterUtils.toString(appId)) + + if (!flinkConfiguration.contains(HighAvailabilityOptions.HA_CLUSTER_ID)) { + 
flinkConfiguration.set(HighAvailabilityOptions.HA_CLUSTER_ID, ConverterUtils.toString(appId)) + } + } + + def logAndException(msg: String, t: Throwable = null): ErrorException = { + logger.error(msg, t) + new JobExecutionException(msg) + } + + def retrieveApplicationId(appIdStr: String): ApplicationId = { + val parts = appIdStr.split("_") + val clusterTimestamp = parts(1).toLong + val sequenceNumber = parts(2).toInt + // Create an ApplicationId object using newInstance method + val appId = ApplicationId.newInstance(clusterTimestamp, sequenceNumber) + appId + } + + def triggerSavepoint( + appIdStr: String, + checkPointPath: String, + restClient: RestClusterClient[ApplicationId] + ): String = { + val jobs = restClient.listJobs().get() + if (null == jobs || jobs.size() > 1) { + val size = if (null == jobs) { + 0 + } else { + jobs.size() + } + val msg = s"App : ${appIdStr} have ${size} jobs, cannot do snapshot." + throw logAndException(msg) + } + if (StringUtils.isBlank(checkPointPath)) { + val msg = s"App : ${appIdStr} checkpoint path is null, cannot do checkpoint" + throw logAndException(msg) + } else { + val firstJob = jobs.asScala.headOption.getOrElse(null).asInstanceOf[JobStatusMessage] + if (null == firstJob) { + val msg = s"App : ${appIdStr} got no head job, cannot do checkPoint and cancel." 
+ throw new JobExecutionException(msg) + } + val rs = restClient.triggerSavepoint(firstJob.getJobId, checkPointPath).get() + rs + } + } + + def convertYarnStateToNodeStatus(appIdStr: String, appStatus: String): NodeStatus = { + val nodeStatus = appStatus match { + case finalState if (FinalApplicationStatus.values().map(_.toString).contains(finalState)) => + FinalApplicationStatus.valueOf(finalState) match { + case FinalApplicationStatus.KILLED | FinalApplicationStatus.FAILED => + NodeStatus.Failed + case FinalApplicationStatus.SUCCEEDED => + NodeStatus.Success + case _ => + val msg: String = if (null != appStatus) { + s"Application : ${appIdStr} has unknown state : ${appStatus.toString}" + } else { + s"Application : ${appIdStr} has null state" + } + throw new JobExecutionException(msg) + } + case yarnState if (YarnApplicationState.values().map(_.toString).contains(yarnState)) => + YarnApplicationState.valueOf(yarnState) match { + case YarnApplicationState.FINISHED => + val msg: String = "Invalid yarn app state : FINISHED" + throw new JobExecutionException(msg) + case YarnApplicationState.KILLED | YarnApplicationState.FAILED => + NodeStatus.Failed + case _ => + NodeStatus.Running + } + case _ => + val msg: String = if (null != appStatus) { + s"Application : ${appIdStr} has unknown state : ${appStatus.toString}" + } else { + s"Application : ${appIdStr} has null state" + } + throw new JobExecutionException(msg) + } + nodeStatus + } + + def isDetach(params: util.Map[String, Any]): Boolean = { + val managerOn = params.getOrDefault( + FlinkEnvConfiguration.FLINK_MANAGER_MODE_CONFIG_KEY.key, + FlinkEnvConfiguration.FLINK_MANAGER_MODE_CONFIG_KEY.getValue + ) + if (null != managerOn && managerOn.toString.toBoolean) { + return true + } + val clientType = params + .getOrDefault( + GovernanceCommonConf.EC_APP_MANAGE_MODE.key, + GovernanceCommonConf.EC_APP_MANAGE_MODE.getValue + ) + .toString + logger.info(s"clientType : ${clientType}") + clientType.toLowerCase() match { + case 
ECConstants.EC_CLIENT_TYPE_DETACH => + true + case _ => + false + } + } + + def getAppIds: Array[String] = { + val ids = new ArrayBuffer[String] + ExecutorManager.getInstance.getExecutors.foreach(executor => { + executor match { + case yarnExecutor: YarnExecutor => + ids.append(yarnExecutor.getApplicationId) + case _ => + } + }) + if (ids.size > 1) { + logger.error( + "There are more than one yarn application running, please check it. Ids : " + ids + .mkString(",") + ) + } + ids.toArray + } + +} diff --git a/linkis-engineconn-plugins/hive/pom.xml b/linkis-engineconn-plugins/hive/pom.xml index 8fe446167c..6cf558479a 100644 --- a/linkis-engineconn-plugins/hive/pom.xml +++ b/linkis-engineconn-plugins/hive/pom.xml @@ -198,6 +198,10 @@ org.apache.logging.log4j log4j-slf4j-impl + + com.google.inject + guice + @@ -220,15 +224,7 @@ com.sun.jersey - jersey-core - - - com.sun.jersey - jersey-server - - - com.sun.jersey - jersey-json + * javax.ws.rs diff --git a/linkis-engineconn-plugins/hive/src/main/java/org/apache/linkis/engineplugin/hive/serde/CustomerDelimitedJSONSerDe.java b/linkis-engineconn-plugins/hive/src/main/java/org/apache/linkis/engineplugin/hive/serde/CustomerDelimitedJSONSerDe.java index 671b0c1d19..fe948f7952 100644 --- a/linkis-engineconn-plugins/hive/src/main/java/org/apache/linkis/engineplugin/hive/serde/CustomerDelimitedJSONSerDe.java +++ b/linkis-engineconn-plugins/hive/src/main/java/org/apache/linkis/engineplugin/hive/serde/CustomerDelimitedJSONSerDe.java @@ -17,6 +17,8 @@ package org.apache.linkis.engineplugin.hive.serde; +import org.apache.linkis.common.utils.ClassUtils; + import org.apache.commons.codec.binary.Base64; import org.apache.hadoop.hive.serde2.ByteStream; import org.apache.hadoop.hive.serde2.SerDeException; @@ -33,6 +35,7 @@ import java.io.IOException; import java.io.OutputStream; +import java.lang.reflect.InvocationTargetException; import java.util.Iterator; import java.util.List; import java.util.Map; @@ -309,18 +312,6 @@ private static 
void writePrimitiveUTF8( binaryData = Base64.encodeBase64(String.valueOf(wc).getBytes()); break; } - case INTERVAL_YEAR_MONTH: - { - wc = ((HiveIntervalYearMonthObjectInspector) oi).getPrimitiveWritableObject(o); - binaryData = Base64.encodeBase64(String.valueOf(wc).getBytes()); - break; - } - case INTERVAL_DAY_TIME: - { - wc = ((HiveIntervalDayTimeObjectInspector) oi).getPrimitiveWritableObject(o); - binaryData = Base64.encodeBase64(String.valueOf(wc).getBytes()); - break; - } case DECIMAL: { HiveDecimalObjectInspector decimalOI = (HiveDecimalObjectInspector) oi; @@ -329,7 +320,52 @@ private static void writePrimitiveUTF8( } default: { - throw new RuntimeException("Unknown primitive type: " + category); + if (!"INTERVAL_YEAR_MONTH".equals(category.name()) + && !"INTERVAL_DAY_TIME".equals(category.name())) { + throw new RuntimeException("Unknown primitive type: " + category); + } + boolean containsIntervalYearMonth = false; + boolean containsIntervalDayTime = false; + for (PrimitiveObjectInspector.PrimitiveCategory primitiveCategory : + PrimitiveObjectInspector.PrimitiveCategory.values()) { + containsIntervalYearMonth = + "INTERVAL_YEAR_MONTH".equals(primitiveCategory.name()) + && "INTERVAL_YEAR_MONTH".equals(category.name()); + containsIntervalDayTime = + "INTERVAL_DAY_TIME".equals(primitiveCategory.name()) + && "INTERVAL_DAY_TIME".equals(category.name()); + try { + if (containsIntervalYearMonth) { + wc = + (WritableComparable) + ClassUtils.getClassInstance( + "org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalYearMonthObjectInspector") + .getClass() + .getMethod("getPrimitiveWritableObject", Object.class) + .invoke(oi, o); + binaryData = Base64.encodeBase64(String.valueOf(wc).getBytes()); + break; + } + if (containsIntervalDayTime) { + wc = + (WritableComparable) + ClassUtils.getClassInstance( + "org.apache.hadoop.hive.serde2.objectinspector.primitive.HiveIntervalDayTimeObjectInspector") + .getClass() + .getMethod("getPrimitiveWritableObject", 
Object.class) + .invoke(oi, o); + binaryData = Base64.encodeBase64(String.valueOf(wc).getBytes()); + break; + } + } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) { + LOG.error("Fail to invoke method:[getPrimitiveWritableObject]!", e); + } + } + if (containsIntervalYearMonth || containsIntervalDayTime) { + break; + } else { + throw new RuntimeException("Unknown primitive type: " + category); + } } } if (binaryData == null) { diff --git a/linkis-engineconn-plugins/hive/src/main/resources/linkis-engineconn.properties b/linkis-engineconn-plugins/hive/src/main/resources/linkis-engineconn.properties index 7fa9cab8ea..f8bf18cac4 100644 --- a/linkis-engineconn-plugins/hive/src/main/resources/linkis-engineconn.properties +++ b/linkis-engineconn-plugins/hive/src/main/resources/linkis-engineconn.properties @@ -22,4 +22,7 @@ wds.linkis.engine.connector.hooks=org.apache.linkis.engineconn.computation.execu #wds.linkis.engineconn.maintain.enable=true #Depending on the engine selected in HIVE_ENGINE_TYPE, control the function called when canceling the task in scripts. -linkis.hive.engine.type=mr \ No newline at end of file +linkis.hive.engine.type=mr + +# support parallelism execution +wds.linkis.engineconn.support.parallelism=false \ No newline at end of file diff --git a/linkis-engineconn-plugins/hive/src/main/resources/log4j2.xml b/linkis-engineconn-plugins/hive/src/main/resources/log4j2.xml index e6f8b150f3..b56efdb366 100644 --- a/linkis-engineconn-plugins/hive/src/main/resources/log4j2.xml +++ b/linkis-engineconn-plugins/hive/src/main/resources/log4j2.xml @@ -15,32 +15,36 @@ ~ See the License for the specific language governing permissions and ~ limitations under the License. 
--> - + - + - + - + + + + + - + - + @@ -97,5 +101,11 @@ + + + + + + diff --git a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/conf/HiveEngineConfiguration.scala b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/conf/HiveEngineConfiguration.scala index 2558000846..4de8f02f52 100644 --- a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/conf/HiveEngineConfiguration.scala +++ b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/conf/HiveEngineConfiguration.scala @@ -40,4 +40,10 @@ object HiveEngineConfiguration { ).getValue val HIVE_ENGINE_TYPE = CommonVars[String]("linkis.hive.engine.type", "mr").getValue + + val HIVE_ENGINE_CONCURRENT_LIMIT = + CommonVars[Int]("linkis.hive.engineconn.concurrent.limit", 10).getValue + + val HIVE_RANGER_ENABLE = CommonVars[Boolean]("linkis.hive.ranger.enabled", false).getValue + } diff --git a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/creation/HiveEngineConnFactory.scala b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/creation/HiveEngineConnFactory.scala index 9a46975979..a9b217074d 100644 --- a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/creation/HiveEngineConnFactory.scala +++ b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/creation/HiveEngineConnFactory.scala @@ -18,29 +18,42 @@ package org.apache.linkis.engineplugin.hive.creation import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.engineconn.acessible.executor.conf.AccessibleExecutorConfiguration import org.apache.linkis.engineconn.common.creation.EngineCreationContext import org.apache.linkis.engineconn.common.engineconn.EngineConn import org.apache.linkis.engineconn.computation.executor.creation.ComputationSingleExecutorEngineConnFactory import 
org.apache.linkis.engineconn.executor.entity.LabelExecutor import org.apache.linkis.engineplugin.hive.common.HiveUtils import org.apache.linkis.engineplugin.hive.conf.HiveEngineConfiguration -import org.apache.linkis.engineplugin.hive.entity.HiveSession +import org.apache.linkis.engineplugin.hive.entity.{ + AbstractHiveSession, + HiveConcurrentSession, + HiveSession +} import org.apache.linkis.engineplugin.hive.errorcode.HiveErrorCodeSummary.CREATE_HIVE_EXECUTOR_ERROR import org.apache.linkis.engineplugin.hive.errorcode.HiveErrorCodeSummary.HIVE_EXEC_JAR_ERROR import org.apache.linkis.engineplugin.hive.exception.HiveSessionStartFailedException -import org.apache.linkis.engineplugin.hive.executor.HiveEngineConnExecutor +import org.apache.linkis.engineplugin.hive.executor.{ + HiveEngineConcurrentConnExecutor, + HiveEngineConnExecutor +} import org.apache.linkis.hadoop.common.utils.HDFSUtils +import org.apache.linkis.manager.engineplugin.common.conf.EnvConfiguration import org.apache.linkis.manager.label.entity.engine.{EngineType, RunType} import org.apache.linkis.manager.label.entity.engine.EngineType.EngineType import org.apache.linkis.manager.label.entity.engine.RunType.RunType import org.apache.commons.lang3.StringUtils +import org.apache.hadoop.fs.Path import org.apache.hadoop.hive.conf.HiveConf import org.apache.hadoop.hive.ql.Driver import org.apache.hadoop.hive.ql.session.SessionState +import org.apache.hadoop.security.UserGroupInformation import java.io.{ByteArrayOutputStream, PrintStream} +import java.nio.file.Paths import java.security.PrivilegedExceptionAction +import java.util import scala.collection.JavaConverters._ @@ -48,6 +61,7 @@ class HiveEngineConnFactory extends ComputationSingleExecutorEngineConnFactory w private val HIVE_QUEUE_NAME: String = "mapreduce.job.queuename" private val BDP_QUEUE_NAME: String = "wds.linkis.rm.yarnqueue" + private val HIVE_TEZ_QUEUE_NAME: String = "tez.queue.name" override protected def newExecutor( id: Int, @@ -63,6 
+77,14 @@ class HiveEngineConnFactory extends ComputationSingleExecutorEngineConnFactory w hiveSession.hiveConf, hiveSession.baos ) + case hiveConcurrentSession: HiveConcurrentSession => + new HiveEngineConcurrentConnExecutor( + id, + hiveConcurrentSession.sessionState, + hiveConcurrentSession.ugi, + hiveConcurrentSession.hiveConf, + hiveConcurrentSession.baos + ) case _ => throw HiveSessionStartFailedException( CREATE_HIVE_EXECUTOR_ERROR.getErrorCode, @@ -73,9 +95,80 @@ class HiveEngineConnFactory extends ComputationSingleExecutorEngineConnFactory w override protected def createEngineConnSession( engineCreationContext: EngineCreationContext - ): HiveSession = { - val options = engineCreationContext.getOptions + ): AbstractHiveSession = { + // if hive engine support concurrent, return HiveConcurrentSession + if (AccessibleExecutorConfiguration.ENGINECONN_SUPPORT_PARALLELISM.getHotValue) { + return doCreateHiveConcurrentSession(engineCreationContext.getOptions) + } + + // return HiveSession + doCreateHiveSession(engineCreationContext.getOptions) + } + + def doCreateHiveConcurrentSession(options: util.Map[String, String]): HiveConcurrentSession = { + val hiveConf: HiveConf = getHiveConf(options) + val ugi = HDFSUtils.getUserGroupInformation(Utils.getJvmUser) + val baos = new ByteArrayOutputStream() + val sessionState: SessionState = getSessionState(hiveConf, ugi, baos) + HiveConcurrentSession(sessionState, ugi, hiveConf, baos) + } + + def doCreateHiveSession(options: util.Map[String, String]): HiveSession = { + val hiveConf: HiveConf = getHiveConf(options) + val ugi = HDFSUtils.getUserGroupInformation(Utils.getJvmUser) + val baos = new ByteArrayOutputStream() + val sessionState: SessionState = getSessionState(hiveConf, ugi, baos) + HiveSession(sessionState, ugi, hiveConf, baos) + } + + private def getSessionState( + hiveConf: HiveConf, + ugi: UserGroupInformation, + baos: ByteArrayOutputStream + ) = { + val sessionState: SessionState = ugi.doAs(new 
PrivilegedExceptionAction[SessionState] { + override def run(): SessionState = new SessionState(hiveConf) + }) + sessionState.out = new PrintStream(baos, true, "utf-8") + sessionState.info = new PrintStream(System.out, true, "utf-8") + sessionState.err = new PrintStream(System.out, true, "utf-8") + SessionState.start(sessionState) + sessionState + } + + private def getHiveConf(options: util.Map[String, String]) = { val hiveConf: HiveConf = HiveUtils.getHiveConf + + if (HiveEngineConfiguration.HIVE_RANGER_ENABLE) { + hiveConf.addResource( + new Path( + Paths + .get(EnvConfiguration.HIVE_CONF_DIR.getValue, "ranger-hive-security.xml") + .toAbsolutePath + .toFile + .getAbsolutePath + ) + ) + hiveConf.addResource( + new Path( + Paths + .get(EnvConfiguration.HIVE_CONF_DIR.getValue, "ranger-hive-audit.xml") + .toAbsolutePath + .toFile + .getAbsolutePath + ) + ) + hiveConf.set("hive.security.authorization.enabled", "true") + hiveConf.set( + "hive.security.authorization.manager", + "org.apache.ranger.authorization.hive.authorizer.RangerHiveAuthorizerFactory" + ) + hiveConf.set( + "hive.conf.restricted.list", + "hive.security.authorization.manager,hive.security.metastore.authorization.manager," + + "hive.security.metastore.authenticator.manager,hive.users.in.admin.role,hive.server2.xsrf.filter.enabled,hive.security.authorization.enabled" + ) + } hiveConf.setVar( HiveConf.ConfVars.HIVEJAR, HiveUtils @@ -95,7 +188,12 @@ class HiveEngineConnFactory extends ComputationSingleExecutorEngineConnFactory w } .foreach { case (k, v) => logger.info(s"key is $k, value is $v") - if (BDP_QUEUE_NAME.equals(k)) hiveConf.set(HIVE_QUEUE_NAME, v) else hiveConf.set(k, v) + if (BDP_QUEUE_NAME.equals(k)) { + hiveConf.set(HIVE_QUEUE_NAME, v) + if ("tez".equals(HiveEngineConfiguration.HIVE_ENGINE_TYPE)) { + hiveConf.set(HIVE_TEZ_QUEUE_NAME, v) + } + } else hiveConf.set(k, v) } hiveConf.setVar( HiveConf.ConfVars.HIVE_HADOOP_CLASSPATH, @@ -126,17 +224,7 @@ class HiveEngineConnFactory extends 
ComputationSingleExecutorEngineConnFactory w */ // enable hive.stats.collect.scancols hiveConf.setBoolean("hive.stats.collect.scancols", true) - val ugi = HDFSUtils.getUserGroupInformation(Utils.getJvmUser) - val sessionState: SessionState = ugi.doAs(new PrivilegedExceptionAction[SessionState] { - override def run(): SessionState = new SessionState(hiveConf) - }) - val baos = new ByteArrayOutputStream() - sessionState.out = new PrintStream(baos, true, "utf-8") - sessionState.info = new PrintStream(System.out, true, "utf-8") - sessionState.err = new PrintStream(System.out, true, "utf-8") - SessionState.start(sessionState) - - HiveSession(sessionState, ugi, hiveConf, baos) + hiveConf } override protected def getEngineConnType: EngineType = EngineType.HIVE diff --git a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/entity/HiveSession.scala b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/entity/HiveSession.scala index c75478925b..95aebac7e1 100644 --- a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/entity/HiveSession.scala +++ b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/entity/HiveSession.scala @@ -23,9 +23,18 @@ import org.apache.hadoop.security.UserGroupInformation import java.io.ByteArrayOutputStream +abstract class AbstractHiveSession + case class HiveSession( sessionState: SessionState, ugi: UserGroupInformation, hiveConf: HiveConf, baos: ByteArrayOutputStream = null -) +) extends AbstractHiveSession + +case class HiveConcurrentSession( + sessionState: SessionState, + ugi: UserGroupInformation, + hiveConf: HiveConf, + baos: ByteArrayOutputStream = null +) extends AbstractHiveSession diff --git a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConcurrentConnExecutor.scala 
b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConcurrentConnExecutor.scala new file mode 100644 index 0000000000..dc02216786 --- /dev/null +++ b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConcurrentConnExecutor.scala @@ -0,0 +1,542 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineplugin.hive.executor + +import org.apache.linkis.common.exception.ErrorException +import org.apache.linkis.common.utils.{ByteTimeUtils, Logging, Utils} +import org.apache.linkis.engineconn.computation.executor.execute.{ + ConcurrentComputationExecutor, + EngineExecutionContext +} +import org.apache.linkis.engineconn.core.EngineConnObject +import org.apache.linkis.engineconn.executor.entity.{ConcurrentExecutor, ResourceFetchExecutor} +import org.apache.linkis.engineplugin.hive.conf.{Counters, HiveEngineConfiguration} +import org.apache.linkis.engineplugin.hive.creation.HiveEngineConnFactory +import org.apache.linkis.engineplugin.hive.cs.CSHiveHelper +import org.apache.linkis.engineplugin.hive.errorcode.HiveErrorCodeSummary.COMPILE_HIVE_QUERY_ERROR +import org.apache.linkis.engineplugin.hive.errorcode.HiveErrorCodeSummary.GET_FIELD_SCHEMAS_ERROR +import org.apache.linkis.engineplugin.hive.exception.HiveQueryFailedException +import org.apache.linkis.governance.common.paser.SQLCodeParser +import org.apache.linkis.governance.common.utils.JobUtils +import org.apache.linkis.hadoop.common.conf.HadoopConf +import org.apache.linkis.manager.common.entity.resource.{ + CommonNodeResource, + LoadInstanceResource, + NodeResource +} +import org.apache.linkis.manager.common.protocol.resource.ResourceWithStatus +import org.apache.linkis.manager.engineplugin.common.util.NodeResourceUtils +import org.apache.linkis.manager.label.entity.Label +import org.apache.linkis.protocol.engine.JobProgressInfo +import org.apache.linkis.scheduler.executer.{ + CompletedExecuteResponse, + ErrorExecuteResponse, + ExecuteResponse, + SuccessExecuteResponse +} +import org.apache.linkis.storage.domain.{Column, DataType} +import org.apache.linkis.storage.resultset.ResultSetFactory +import org.apache.linkis.storage.resultset.table.{TableMetaData, TableRecord} + +import org.apache.commons.lang3.StringUtils +import org.apache.hadoop.hive.conf.HiveConf +import 
org.apache.hadoop.hive.metastore.api.{FieldSchema, Schema} +import org.apache.hadoop.hive.ql.exec.Utilities +import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper +import org.apache.hadoop.hive.ql.processors.{ + CommandProcessor, + CommandProcessorFactory, + CommandProcessorResponse +} +import org.apache.hadoop.hive.ql.session.SessionState +import org.apache.hadoop.mapred.{JobStatus, RunningJob} +import org.apache.hadoop.security.UserGroupInformation + +import java.io.ByteArrayOutputStream +import java.security.PrivilegedExceptionAction +import java.util +import java.util.concurrent.{ + Callable, + ConcurrentHashMap, + LinkedBlockingQueue, + ThreadPoolExecutor, + TimeUnit +} + +import scala.collection.JavaConverters._ +import scala.collection.mutable +import scala.collection.mutable.ArrayBuffer + +import com.google.common.util.concurrent.ThreadFactoryBuilder +import org.slf4j.LoggerFactory + +class HiveEngineConcurrentConnExecutor( + id: Int, + sessionState: SessionState, + ugi: UserGroupInformation, + hiveConf: HiveConf, + baos: ByteArrayOutputStream = null +) extends ConcurrentComputationExecutor + with ResourceFetchExecutor { + + private val LOG = LoggerFactory.getLogger(getClass) + + private val namePrefix: String = "HiveEngineExecutor_" + + private val executorLabels: util.List[Label[_]] = new util.ArrayList[Label[_]]() + + private val driverCache: util.Map[String, HiveDriverProxy] = + new ConcurrentHashMap[String, HiveDriverProxy]() + + private val applicationStringName = "application" + + private val splitter = "_" + + private var backgroundOperationPool: ThreadPoolExecutor = _ + + override def init(): Unit = { + LOG.info(s"Ready to change engine state!") + if (HadoopConf.KEYTAB_PROXYUSER_ENABLED.getValue) { + System.setProperty("javax.security.auth.useSubjectCredsOnly", "false"); + } + setCodeParser(new SQLCodeParser) + + val queue = new LinkedBlockingQueue[Runnable](100) + backgroundOperationPool = new ThreadPoolExecutor( + 100, + 100, + 10, + 
TimeUnit.SECONDS, + queue, + new ThreadFactoryBuilder().setNameFormat("Hive-Background-Pool-%d").build + ) + backgroundOperationPool.allowCoreThreadTimeOut(true) + super.init() + } + + override def executeLine( + engineExecutorContext: EngineExecutionContext, + code: String + ): ExecuteResponse = { + LOG.info(s"HiveEngineConcurrentConnExecutor Ready to executeLine: $code") + val taskId: String = engineExecutorContext.getJobId.get + CSHiveHelper.setContextIDInfoToHiveConf(engineExecutorContext, hiveConf) + + val realCode = code.trim() + + LOG.info(s"hive client begins to run hql code:\n ${realCode.trim}") + val jobId = JobUtils.getJobIdFromMap(engineExecutorContext.getProperties) + if (StringUtils.isNotBlank(jobId)) { + LOG.info(s"set mapreduce.job.tags=LINKIS_$jobId") + hiveConf.set("mapreduce.job.tags", s"LINKIS_$jobId") + } + if (realCode.trim.length > 500) { + engineExecutorContext.appendStdout(s"$getId >> ${realCode.trim.substring(0, 500)} ...") + } else engineExecutorContext.appendStdout(s"$getId >> ${realCode.trim}") + val tokens = realCode.trim.split("""\s+""") + + val operation = new Callable[ExecuteResponse] { + override def call(): ExecuteResponse = { + SessionState.setCurrentSessionState(sessionState) + sessionState.setLastCommand(code) + + val proc = CommandProcessorFactory.get(tokens, hiveConf) + LOG.debug("ugi is " + ugi.getUserName) + ugi.doAs(new PrivilegedExceptionAction[ExecuteResponse]() { + override def run(): ExecuteResponse = { + proc match { + case any if HiveDriverProxy.isDriver(any) => + logger.info(s"driver is $any") + + val driver = new HiveDriverProxy(any) + driverCache.put(taskId, driver) + executeHQL( + engineExecutorContext.getJobId.get, + engineExecutorContext, + realCode, + driver + ) + case _ => + val resp = proc.run(realCode.substring(tokens(0).length).trim) + val result = new String(baos.toByteArray) + logger.info("RESULT => {}", result) + engineExecutorContext.appendStdout(result) + baos.reset() + if (resp.getResponseCode != 0) 
{ + onComplete() + throw resp.getException + } + onComplete() + SuccessExecuteResponse() + } + } + }) + } + } + + val future = backgroundOperationPool.submit(operation) + future.get() + } + + def logMemoryCache(): Unit = { + logger.info(s"logMemoryCache running driver number: ${driverCache.size()}") + for (driverEntry <- driverCache.asScala) { + logger.info(s"running driver with taskId : ${driverEntry._1} .") + } + } + + private def executeHQL( + taskId: String, + engineExecutorContext: EngineExecutionContext, + realCode: String, + driver: HiveDriverProxy + ): ExecuteResponse = { + var needRetry: Boolean = true + var tryCount: Int = 0 + var hasResult: Boolean = false + var rows: Int = 0 + var columnCount: Int = 0 + + while (needRetry) { + needRetry = false + driver.setTryCount(tryCount + 1) + val startTime = System.currentTimeMillis() + + try { + val hiveResponse: CommandProcessorResponse = + if (!HiveDriverProxy.isIDriver(driver.getDriver())) { + var compileRet = -1 + Utils.tryCatch { + compileRet = driver.compile(realCode) + logger.info( + s"driver compile realCode : \n ${realCode} \n finished, status : ${compileRet}" + ) + if (0 != compileRet) { + logger.warn(s"compile realCode : \n ${realCode} \n error status : ${compileRet}") + throw HiveQueryFailedException( + COMPILE_HIVE_QUERY_ERROR.getErrorCode, + COMPILE_HIVE_QUERY_ERROR.getErrorDesc + ) + } + + val queryPlan = driver.getPlan() + val numberOfJobs = Utilities.getMRTasks(queryPlan.getRootTasks).size + if (numberOfJobs > 0) { + engineExecutorContext.appendStdout( + s"Your hive taskId: $taskId has $numberOfJobs MR jobs to do" + ) + } + + logger.info(s"there are ${numberOfJobs} jobs.") + } { + case e: Exception => logger.warn("obtain hive execute query plan failed,", e) + case t: Throwable => logger.warn("obtain hive execute query plan failed,", t) + } + + driver.run(realCode, compileRet == 0) + } else { + driver.run(realCode) + } + if (hiveResponse.getResponseCode != 0) { + LOG.error("Hive query failed, 
response code is {}", hiveResponse.getResponseCode) + // todo check uncleared context ? + return ErrorExecuteResponse(hiveResponse.getErrorMessage, hiveResponse.getException) + } + + engineExecutorContext.appendStdout( + s"Time taken: ${ByteTimeUtils.msDurationToString(System.currentTimeMillis() - startTime)}, begin to fetch results." + ) + LOG.info( + s"$getId >> Time taken: ${ByteTimeUtils.msDurationToString(System.currentTimeMillis() - startTime)}, begin to fetch results." + ) + + val fieldSchemas = + if (hiveResponse.getSchema != null) hiveResponse.getSchema.getFieldSchemas + else if (driver.getSchema != null) { + driver.getSchema.getFieldSchemas + } else { + throw HiveQueryFailedException( + GET_FIELD_SCHEMAS_ERROR.getErrorCode, + GET_FIELD_SCHEMAS_ERROR.getErrorDesc + ) + } + LOG.debug("fieldSchemas are " + fieldSchemas) + if (fieldSchemas == null || isNoResultSql(realCode)) { + // IOUtils.closeQuietly(resultSetWriter) + onComplete() + + return SuccessExecuteResponse() + } + // get column data + val metaData: TableMetaData = + getResultMetaData(fieldSchemas, engineExecutorContext.getEnableResultsetMetaWithTableName) + // send result + rows = sendResultSet(engineExecutorContext, driver, metaData) + columnCount = if (fieldSchemas != null) fieldSchemas.size() else 0 + hasResult = true + + } catch { + case e if HiveDriverProxy.isCommandNeedRetryException(e) => + tryCount += 1 + needRetry = true + onComplete() + + LOG.warn("Retry hive query with a different approach...") + case t: Throwable => + LOG.error(s"query failed, reason : ", t) + onComplete() + return ErrorExecuteResponse(t.getMessage, t) + } finally { + driverCache.remove(taskId) + logMemoryCache() + } + } + if (hasResult) { + engineExecutorContext.appendStdout(s"Fetched $columnCount col(s) : $rows row(s) in hive") + LOG.info(s"$getId >> Fetched $columnCount col(s) : $rows row(s) in hive") + } + + onComplete() + SuccessExecuteResponse() + } + + private def sendResultSet( + engineExecutorContext: 
EngineExecutionContext, + driver: HiveDriverProxy, + metaData: TableMetaData + ): Int = { + val resultSetWriter = engineExecutorContext.createResultSetWriter(ResultSetFactory.TABLE_TYPE) + resultSetWriter.addMetaData(metaData) + val colLength = metaData.columns.length + val result = new util.ArrayList[String]() + var rows = 0 + while (driver.getResults(result)) { + val scalaResult: mutable.Buffer[String] = result.asScala + scalaResult foreach { s => + val arr: Array[String] = s.split("\t") + val arrAny: ArrayBuffer[Any] = new ArrayBuffer[Any]() + if (arr.length > colLength) { + logger.error( + s"""There is a \t tab in the result of hive code query, hive cannot cut it, please use spark to execute(查询的结果中有\t制表符,hive不能进行切割,请使用spark执行)""" + ) + throw new ErrorException( + 60078, + """There is a \t tab in the result of your query, hive cannot cut it, please use spark to execute(您查询的结果中有\t制表符,hive不能进行切割,请使用spark执行)""" + ) + } + if (arr.length == colLength) arr foreach arrAny.asJava.add + else if (arr.length == 0) for (i <- 1 to colLength) arrAny.asJava add "" + else { + val i = colLength - arr.length + arr foreach arrAny.asJava.add + for (i <- 1 to i) arrAny.asJava add "" + } + resultSetWriter.addRecord(new TableRecord(arrAny.toArray)) + } + rows += result.size + result.clear() + } + engineExecutorContext.sendResultSet(resultSetWriter) + rows + } + + private def getResultMetaData( + fieldSchemas: util.List[FieldSchema], + useTableName: Boolean + ): TableMetaData = { + var results: util.List[FieldSchema] = null + val nameSet = new mutable.HashSet[String]() + val cleanSchema = new util.ArrayList[FieldSchema]() + fieldSchemas.asScala foreach { fieldSchema => + val name = fieldSchema.getName + if (name.split('.').length == 2) { + nameSet.add(name.split('.')(1)) + cleanSchema.asScala += new FieldSchema( + name.split('.')(1), + fieldSchema.getType, + fieldSchema.getComment + ) + } + } + if (nameSet.size < fieldSchemas.asScala.length) { + results = fieldSchemas + } else { + if 
(useTableName) { + results = fieldSchemas + } else { + results = cleanSchema + } + } + + val columns = results.asScala + .map(result => + new Column( + result.getName, + DataType.toDataType(result.getType.toLowerCase()), + result.getComment + ) + ) + .toArray[Column] + val metaData = new TableMetaData(columns) + metaData + } + + private def isNoResultSql(sql: String): Boolean = { + if (sql.trim.startsWith("create table") || sql.trim.startsWith("drop table")) true else false + } + + /** + * Before the job is completed, all the remaining contents of the singleSqlProgressMap should be + * changed to success + */ + private def onComplete(): Unit = {} + + override def executeCompletely( + engineExecutorContext: EngineExecutionContext, + code: String, + completedLine: String + ): ExecuteResponse = { + val completeCode = code + completedLine + executeLine(engineExecutorContext, completeCode) + } + + override def close(): Unit = { + killAll() + + if (backgroundOperationPool != null) { + backgroundOperationPool.shutdown() + try backgroundOperationPool.awaitTermination(10, TimeUnit.SECONDS) + catch { + case e: InterruptedException => + LOG.warn( + "HIVE_SERVER2_ASYNC_EXEC_SHUTDOWN_TIMEOUT = " + 10 + " seconds has been exceeded. 
RUNNING background operations will be shut down", + e + ) + } + backgroundOperationPool = null + } + super.close() + } + + override def FetchResource: util.HashMap[String, ResourceWithStatus] = { + val resourceMap = new util.HashMap[String, ResourceWithStatus]() + HadoopJobExecHelper.runningJobs.asScala.foreach(yarnJob => { + val counters = yarnJob.getCounters + if (counters != null) { + val millsMap = counters.getCounter(Counters.MILLIS_MAPS) + val millsReduces = counters.getCounter(Counters.MILLIS_REDUCES) + val totalMapCores = counters.getCounter(Counters.VCORES_MILLIS_MAPS) + val totalReducesCores = counters.getCounter(Counters.VCORES_MILLIS_REDUCES) + val totalMapMBMemory = counters.getCounter(Counters.MB_MILLIS_MAPS) + val totalReducesMBMemory = counters.getCounter(Counters.MB_MILLIS_REDUCES) + var avgCores = 0 + var avgMemory = 0L + if (millsMap > 0 && millsReduces > 0) { + avgCores = Math.ceil(totalMapCores / millsMap + totalReducesCores / millsReduces).toInt + avgMemory = Math + .ceil( + totalMapMBMemory * 1024 * 1024 / millsMap + totalReducesMBMemory.toLong * 1024 * 1024 / millsReduces + ) + .toLong + val yarnResource = new ResourceWithStatus( + avgMemory, + avgCores, + 0, + JobStatus.getJobRunState(yarnJob.getJobStatus.getRunState), + "default" + ) + val applicationId = + applicationStringName + splitter + yarnJob.getID.getJtIdentifier + splitter + yarnJob.getID.getId + resourceMap.put(applicationId, yarnResource) + } + } + }) + resourceMap + } + + override def progress(taskID: String): Float = 0.0f + + override def getProgressInfo(taskID: String): Array[JobProgressInfo] = + Array.empty[JobProgressInfo] + + override def killTask(taskID: String): Unit = { + cleanup(taskID) + super.killTask(taskID) + } + + override def getConcurrentLimit: Int = HiveEngineConfiguration.HIVE_ENGINE_CONCURRENT_LIMIT + + override def killAll(): Unit = { + val iterator = driverCache.entrySet().iterator() + while (iterator.hasNext) { + val entry = iterator.next() + val taskID = 
entry.getKey + cleanup(taskID) + + super.killTask(taskID) + iterator.remove() + } + + sessionState.deleteTmpOutputFile() + sessionState.deleteTmpErrOutputFile() + sessionState.close() + } + + private def cleanup(taskID: String) = { + val driver = driverCache.get(taskID) + if (driver == null) LOG.warn(s"do cleanup taskId :${taskID} driver is null.") + else { + driver.close() + driverCache.remove(taskID) + } + + LOG.info(s"hive begins to kill job with id : ${taskID}") + // Configure the engine through the wds.linkis.hive.engine.type parameter to control the way the task is killed + LOG.info(s"hive engine type :${HiveEngineConfiguration.HIVE_ENGINE_TYPE}") + LOG.info("hive killed job successfully") + } + + override def supportCallBackLogs(): Boolean = { + // todo + true + } + + override def getExecutorLabels(): util.List[Label[_]] = executorLabels + + override def setExecutorLabels(labels: util.List[Label[_]]): Unit = { + if (null != labels) { + executorLabels.clear() + executorLabels.addAll(labels) + } + } + + override def requestExpectedResource(expectedResource: NodeResource): NodeResource = { + null + } + + override def getCurrentNodeResource(): NodeResource = { + val resource = new CommonNodeResource + resource.setUsedResource( + NodeResourceUtils + .applyAsLoadInstanceResource(EngineConnObject.getEngineCreationContext.getOptions) + ) + resource + } + + override def getId(): String = namePrefix + id +} diff --git a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala index d73cd9a6c7..a4c75e7638 100644 --- a/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala +++ b/linkis-engineconn-plugins/hive/src/main/scala/org/apache/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala @@ -204,9 +204,11 @@ class 
HiveEngineConnExecutor( var compileRet = -1 Utils.tryCatch { compileRet = driver.compile(realCode) - logger.info(s"driver compile realCode : ${realCode} finished, status : ${compileRet}") + logger.info( + s"driver compile realCode : \n ${realCode} \n finished, status : ${compileRet}" + ) if (0 != compileRet) { - logger.warn(s"compile realCode : ${realCode} error status : ${compileRet}") + logger.warn(s"compile realCode : \n ${realCode} \n error status : ${compileRet}") throw HiveQueryFailedException( COMPILE_HIVE_QUERY_ERROR.getErrorCode, COMPILE_HIVE_QUERY_ERROR.getErrorDesc @@ -379,7 +381,11 @@ class HiveEngineConnExecutor( val columns = results.asScala .map(result => - Column(result.getName, DataType.toDataType(result.getType.toLowerCase()), result.getComment) + new Column( + result.getName, + DataType.toDataType(result.getType.toLowerCase()), + result.getComment + ) ) .toArray[Column] val metaData = new TableMetaData(columns) @@ -640,12 +646,10 @@ class HiveDriverProxy(driver: Any) extends Logging { } def getResults(res: util.List[_]): Boolean = { - Utils.tryAndWarn { - driver.getClass - .getMethod("getResults", classOf[util.List[_]]) - .invoke(driver, res.asInstanceOf[AnyRef]) - .asInstanceOf[Boolean] - } + driver.getClass + .getMethod("getResults", classOf[util.List[_]]) + .invoke(driver, res.asInstanceOf[AnyRef]) + .asInstanceOf[Boolean] } def close(): Unit = { diff --git a/linkis-engineconn-plugins/io_file/src/main/resources/log4j2.xml b/linkis-engineconn-plugins/io_file/src/main/resources/log4j2.xml index 7852867ba3..e75ce8392b 100644 --- a/linkis-engineconn-plugins/io_file/src/main/resources/log4j2.xml +++ b/linkis-engineconn-plugins/io_file/src/main/resources/log4j2.xml @@ -20,7 +20,7 @@ - + @@ -31,12 +31,12 @@ - + - + diff --git a/linkis-engineconn-plugins/jdbc/src/main/resources/log4j2.xml b/linkis-engineconn-plugins/jdbc/src/main/resources/log4j2.xml index 83186732fc..2e36367c4a 100644 --- a/linkis-engineconn-plugins/jdbc/src/main/resources/log4j2.xml 
+++ b/linkis-engineconn-plugins/jdbc/src/main/resources/log4j2.xml @@ -20,7 +20,7 @@ - + @@ -31,12 +31,12 @@ - + - + diff --git a/linkis-engineconn-plugins/openlookeng/src/main/resources/log4j2.xml b/linkis-engineconn-plugins/openlookeng/src/main/resources/log4j2.xml index e9e7bb70ff..35ee726620 100644 --- a/linkis-engineconn-plugins/openlookeng/src/main/resources/log4j2.xml +++ b/linkis-engineconn-plugins/openlookeng/src/main/resources/log4j2.xml @@ -20,17 +20,17 @@ - + - + - + diff --git a/linkis-engineconn-plugins/pipeline/src/main/resources/log4j2.xml b/linkis-engineconn-plugins/pipeline/src/main/resources/log4j2.xml index 03517a50e8..86102506c8 100644 --- a/linkis-engineconn-plugins/pipeline/src/main/resources/log4j2.xml +++ b/linkis-engineconn-plugins/pipeline/src/main/resources/log4j2.xml @@ -20,7 +20,7 @@ - + @@ -31,12 +31,12 @@ - + - + diff --git a/linkis-engineconn-plugins/pipeline/src/main/scala/org/apache/linkis/manager/engineplugin/pipeline/executor/ExcelExecutor.scala b/linkis-engineconn-plugins/pipeline/src/main/scala/org/apache/linkis/manager/engineplugin/pipeline/executor/ExcelExecutor.scala index 42fd016e28..42c0e27cd2 100644 --- a/linkis-engineconn-plugins/pipeline/src/main/scala/org/apache/linkis/manager/engineplugin/pipeline/executor/ExcelExecutor.scala +++ b/linkis-engineconn-plugins/pipeline/src/main/scala/org/apache/linkis/manager/engineplugin/pipeline/executor/ExcelExecutor.scala @@ -18,6 +18,7 @@ package org.apache.linkis.manager.engineplugin.pipeline.executor import org.apache.linkis.common.io.FsPath +import org.apache.linkis.common.utils.ResultSetUtils import org.apache.linkis.engineconn.computation.executor.execute.EngineExecutionContext import org.apache.linkis.manager.engineplugin.pipeline.conf.PipelineEngineConfiguration import org.apache.linkis.manager.engineplugin.pipeline.conf.PipelineEngineConfiguration.PIPELINE_OUTPUT_ISOVERWRITE_SWITCH @@ -84,8 +85,9 @@ class ExcelExecutor extends PipeLineExecutor { if (fsPathListWithError == 
null) { throw new PipeLineErrorException(EMPTY_DIR.getErrorCode, EMPTY_DIR.getErrorDesc) } - fileSource = - FileSource.create(fsPathListWithError.getFsPaths.toArray(Array[FsPath]()), sourceFs) + val fsPathList = fsPathListWithError.getFsPaths + ResultSetUtils.sortByNameNum(fsPathList) + fileSource = FileSource.create(fsPathList.toArray(Array[FsPath]()), sourceFs) } if (!FileSource.isTableResultSet(fileSource)) { throw new PipeLineErrorException( diff --git a/linkis-engineconn-plugins/presto/src/main/resources/log4j2.xml b/linkis-engineconn-plugins/presto/src/main/resources/log4j2.xml index 020b94567e..2cd3e264c3 100644 --- a/linkis-engineconn-plugins/presto/src/main/resources/log4j2.xml +++ b/linkis-engineconn-plugins/presto/src/main/resources/log4j2.xml @@ -20,23 +20,23 @@ - + - + - + - + diff --git a/linkis-engineconn-plugins/presto/src/main/scala/org/apache/linkis/engineplugin/presto/executor/PrestoEngineConnExecutor.scala b/linkis-engineconn-plugins/presto/src/main/scala/org/apache/linkis/engineplugin/presto/executor/PrestoEngineConnExecutor.scala index cdcec75191..56d984e9cf 100644 --- a/linkis-engineconn-plugins/presto/src/main/scala/org/apache/linkis/engineplugin/presto/executor/PrestoEngineConnExecutor.scala +++ b/linkis-engineconn-plugins/presto/src/main/scala/org/apache/linkis/engineplugin/presto/executor/PrestoEngineConnExecutor.scala @@ -19,6 +19,7 @@ package org.apache.linkis.engineplugin.presto.executor import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{OverloadUtils, Utils} +import org.apache.linkis.engineconn.acessible.executor.listener.event.TaskLogUpdateEvent import org.apache.linkis.engineconn.common.conf.{EngineConnConf, EngineConnConstant} import org.apache.linkis.engineconn.computation.executor.entity.EngineConnTask import org.apache.linkis.engineconn.computation.executor.execute.{ @@ -26,6 +27,7 @@ import org.apache.linkis.engineconn.computation.executor.execute.{ EngineExecutionContext } import 
org.apache.linkis.engineconn.core.EngineConnObject +import org.apache.linkis.engineconn.executor.listener.ExecutorListenerBusContext import org.apache.linkis.engineplugin.presto.conf.PrestoConfiguration._ import org.apache.linkis.engineplugin.presto.conf.PrestoEngineConf import org.apache.linkis.engineplugin.presto.errorcode.PrestoErrorCodeSummary @@ -436,6 +438,19 @@ class PrestoEngineConnExecutor(override val outputPrintLimit: Int, val id: Int) } override def close(): Unit = { + val taskIds = statementClientCache.keySet().iterator() + val lbs = ExecutorListenerBusContext.getExecutorListenerBusContext() + while (taskIds.hasNext) { + val taskId = taskIds.next() + Utils.tryAndWarn( + lbs.getEngineConnSyncListenerBus.postToAll( + TaskLogUpdateEvent( + taskId, + LogUtils.generateERROR("EC exits unexpectedly and actively kills the task") + ) + ) + ) + } killAll() super.close() } diff --git a/linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/errorcode/LinkisPythonErrorCodeSummary.java b/linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/errorcode/LinkisPythonErrorCodeSummary.java index c17fc81e9d..9158ae6a8a 100644 --- a/linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/errorcode/LinkisPythonErrorCodeSummary.java +++ b/linkis-engineconn-plugins/python/src/main/java/org/apache/linkis/manager/engineplugin/python/errorcode/LinkisPythonErrorCodeSummary.java @@ -20,7 +20,7 @@ import org.apache.linkis.common.errorcode.LinkisErrorCode; public enum LinkisPythonErrorCodeSummary implements LinkisErrorCode { - PYTHON_EXECUTE_ERROR(41001, ""), + PYTHON_EXECUTE_ERROR(60002, ""), PYSPARK_PROCESSS_STOPPED( 60003, "Pyspark process has stopped, query failed!(Pyspark 进程已停止,查询失败!)"), INVALID_PYTHON_SESSION(400201, "Invalid python session.(无效的 python 会话.)"); diff --git a/linkis-engineconn-plugins/python/src/main/resources/log4j2.xml 
b/linkis-engineconn-plugins/python/src/main/resources/log4j2.xml index a385c888fa..1b04ca2996 100644 --- a/linkis-engineconn-plugins/python/src/main/resources/log4j2.xml +++ b/linkis-engineconn-plugins/python/src/main/resources/log4j2.xml @@ -20,7 +20,7 @@ - + @@ -31,12 +31,12 @@ - + - + diff --git a/linkis-engineconn-plugins/seatunnel/src/main/resources/log4j2.xml b/linkis-engineconn-plugins/seatunnel/src/main/resources/log4j2.xml index 83186732fc..2e36367c4a 100644 --- a/linkis-engineconn-plugins/seatunnel/src/main/resources/log4j2.xml +++ b/linkis-engineconn-plugins/seatunnel/src/main/resources/log4j2.xml @@ -20,7 +20,7 @@ - + @@ -31,12 +31,12 @@ - + - + diff --git a/linkis-engineconn-plugins/shell/src/main/resources/conf/log4j2.xml b/linkis-engineconn-plugins/shell/src/main/resources/conf/log4j2.xml index 68173f23ac..027b17e086 100644 --- a/linkis-engineconn-plugins/shell/src/main/resources/conf/log4j2.xml +++ b/linkis-engineconn-plugins/shell/src/main/resources/conf/log4j2.xml @@ -20,23 +20,23 @@ - + - + - + - + diff --git a/linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/executor/ReaderThread.scala b/linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/executor/ReaderThread.scala index 8277cb1164..b59e47c2bc 100644 --- a/linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/executor/ReaderThread.scala +++ b/linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/executor/ReaderThread.scala @@ -28,6 +28,8 @@ import java.io.BufferedReader import java.util import java.util.concurrent.CountDownLatch +import com.google.common.collect.EvictingQueue + class ReaderThread extends Thread with Logging { private var engineExecutionContext: EngineExecutionContext = _ @@ -37,6 +39,8 @@ class ReaderThread extends Thread with Logging { private val logListCount = CommonVars[Int]("wds.linkis.engineconn.log.list.count", 50) 
private var counter: CountDownLatch = _ + private var outEvictingQueue: EvictingQueue[String] = EvictingQueue.create(5) + private var isReaderAlive = true def this( @@ -58,6 +62,10 @@ class ReaderThread extends Thread with Logging { isReaderAlive = false } + def getOutString(): String = { + StringUtils.join(outEvictingQueue.toArray, "\n") + } + def startReaderThread(): Unit = { Utils.tryCatch { this.start() @@ -74,6 +82,7 @@ class ReaderThread extends Thread with Logging { logger.info("read logger line :{}", line) logArray.add(line) extractor.appendLineToExtractor(line) + outEvictingQueue.add(line); if (isStdout) engineExecutionContext.appendTextResultSet(line) if (logArray.size > logListCount.getValue) { val linelist = StringUtils.join(logArray, "\n") diff --git a/linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/executor/ShellEngineConnConcurrentExecutor.scala b/linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/executor/ShellEngineConnConcurrentExecutor.scala index b66353ca09..b1567b088b 100644 --- a/linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/executor/ShellEngineConnConcurrentExecutor.scala +++ b/linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/executor/ShellEngineConnConcurrentExecutor.scala @@ -17,12 +17,15 @@ package org.apache.linkis.manager.engineplugin.shell.executor +import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{Logging, OverloadUtils, Utils} +import org.apache.linkis.engineconn.acessible.executor.listener.event.TaskLogUpdateEvent import org.apache.linkis.engineconn.computation.executor.execute.{ ConcurrentComputationExecutor, EngineExecutionContext } import org.apache.linkis.engineconn.core.EngineConnObject +import org.apache.linkis.engineconn.executor.listener.ExecutorListenerBusContext import 
org.apache.linkis.governance.common.utils.GovernanceUtils import org.apache.linkis.manager.common.entity.resource.{ CommonNodeResource, @@ -207,7 +210,10 @@ class ShellEngineConnConcurrentExecutor(id: Int, maxRunningNumber: Int) completed.set(true) if (exitCode != 0) { - ErrorExecuteResponse("run shell failed", ShellCodeErrorException()) + ErrorExecuteResponse( + s"run shell failed with error:\n ${errReaderThread.getOutString()}", + ShellCodeErrorException() + ) } else SuccessExecuteResponse() } catch { @@ -324,8 +330,22 @@ class ShellEngineConnConcurrentExecutor(id: Int, maxRunningNumber: Int) } override def close(): Unit = { + val lbs = ExecutorListenerBusContext.getExecutorListenerBusContext() Utils.tryCatch { - killAll() + val iterator = shellECTaskInfoCache.values().iterator() + while (iterator.hasNext) { + val shellECTaskInfo = iterator.next() + Utils.tryAndWarn( + lbs.getEngineConnSyncListenerBus.postToAll( + TaskLogUpdateEvent( + shellECTaskInfo.taskId, + LogUtils.generateERROR("EC exits unexpectedly and actively kills the task") + ) + ) + ) + Utils.tryAndWarn(killTask(shellECTaskInfo.taskId)) + } + logAsyncService.shutdown() } { t: Throwable => logger.error(s"Shell ec failed to close ", t) @@ -339,6 +359,7 @@ class ShellEngineConnConcurrentExecutor(id: Int, maxRunningNumber: Int) val shellECTaskInfo = iterator.next() Utils.tryAndWarn(killTask(shellECTaskInfo.taskId)) } + shellECTaskInfoCache.clear() } override def getConcurrentLimit: Int = { diff --git a/linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/executor/ShellEngineConnExecutor.scala b/linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/executor/ShellEngineConnExecutor.scala index c9f3206259..a7d321e3aa 100644 --- a/linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/executor/ShellEngineConnExecutor.scala +++ 
b/linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/executor/ShellEngineConnExecutor.scala @@ -194,7 +194,10 @@ class ShellEngineConnExecutor(id: Int) extends ComputationExecutor with Logging completed.set(true) if (exitCode != 0) { - ErrorExecuteResponse("run shell failed", ShellCodeErrorException()) + ErrorExecuteResponse( + s"run shell failed with error:\n ${errReaderThread.getOutString()}", + ShellCodeErrorException() + ) } else SuccessExecuteResponse() } catch { case e: Exception => diff --git a/linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/factory/ShellEngineConnFactory.scala b/linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/factory/ShellEngineConnFactory.scala index cbfffc244d..85baceef62 100644 --- a/linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/factory/ShellEngineConnFactory.scala +++ b/linkis-engineconn-plugins/shell/src/main/scala/org/apache/linkis/manager/engineplugin/shell/factory/ShellEngineConnFactory.scala @@ -38,7 +38,7 @@ class ShellEngineConnFactory extends ComputationSingleExecutorEngineConnFactory engineCreationContext: EngineCreationContext, engineConn: EngineConn ): LabelExecutor = { - if (AccessibleExecutorConfiguration.ENGINECONN_SUPPORT_PARALLELISM) { + if (AccessibleExecutorConfiguration.ENGINECONN_SUPPORT_PARALLELISM.getValue) { new ShellEngineConnConcurrentExecutor( id, ShellEngineConnConf.SHELL_ENGINECONN_CONCURRENT_LIMIT diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/client/context/SparkConfig.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/client/context/SparkConfig.java index c8f7842220..20b0749d83 100644 --- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/client/context/SparkConfig.java +++ 
b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/client/context/SparkConfig.java @@ -27,7 +27,6 @@ public class SparkConfig { private String master = "yarn"; // ("yarn") private String deployMode = "client"; // ("client") // todo cluster private String appResource; // ("") - // private String mainClass; // ("") private String appName; // ("") private String jars; // ("--jars", "") private String packages; // ("--packages", "") @@ -92,14 +91,6 @@ public void setAppResource(String appResource) { this.appResource = appResource; } - // public String getMainClass() { - // return mainClass; - // } - // - // public void setMainClass(String mainClass) { - // this.mainClass = mainClass; - // } - public String getAppName() { return appName; } @@ -299,9 +290,7 @@ public String toString() { + ", deployMode='" + deployMode + '\'' - + - // ", mainClass='" + mainClass + '\'' + - ", appName='" + + ", appName='" + appName + '\'' + ", jars='" diff --git a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/client/deployment/YarnApplicationClusterDescriptorAdapter.java b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/client/deployment/YarnApplicationClusterDescriptorAdapter.java index 21f2885b4d..9c753d862f 100644 --- a/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/client/deployment/YarnApplicationClusterDescriptorAdapter.java +++ b/linkis-engineconn-plugins/spark/src/main/java/org/apache/linkis/engineplugin/spark/client/deployment/YarnApplicationClusterDescriptorAdapter.java @@ -47,7 +47,6 @@ public void deployCluster(String mainClass, String args, Map con .setMaster(sparkConfig.getMaster()) .setDeployMode(sparkConfig.getDeployMode()) .setAppName(sparkConfig.getAppName()) - // .setPropertiesFile("") .setVerbose(true); sparkLauncher.setConf("spark.app.name", sparkConfig.getAppName()); if (confMap != null) confMap.forEach((k, v) -> 
sparkLauncher.setConf(k, v)); @@ -75,7 +74,6 @@ public void deployCluster(String mainClass, String args, Map con Arrays.stream(args.split("\\s+")) .filter(StringUtils::isNotBlank) .forEach(arg -> sparkLauncher.addAppArgs(arg)); - // sparkLauncher.addAppArgs(args); sparkAppHandle = sparkLauncher.startApplication( new SparkAppHandle.Listener() { diff --git a/linkis-engineconn-plugins/spark/src/main/resources/log4j2.xml b/linkis-engineconn-plugins/spark/src/main/resources/log4j2.xml index b02d5889cd..de16e87efa 100644 --- a/linkis-engineconn-plugins/spark/src/main/resources/log4j2.xml +++ b/linkis-engineconn-plugins/spark/src/main/resources/log4j2.xml @@ -20,32 +20,32 @@ - + - + - + - + - + - + diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/SparkEngineConnPlugin.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/SparkEngineConnPlugin.scala index ea8c61eed0..c1cd0a4792 100644 --- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/SparkEngineConnPlugin.scala +++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/SparkEngineConnPlugin.scala @@ -38,14 +38,10 @@ class SparkEngineConnPlugin extends EngineConnPlugin { private val resourceLocker = new Object() - private val engineLaunchBuilderLocker = new Object() - private val engineFactoryLocker = new Object() private var engineResourceFactory: EngineResourceFactory = _ - private var engineLaunchBuilder: EngineConnLaunchBuilder = _ - private var engineFactory: EngineConnFactory = _ override def init(params: util.Map[String, AnyRef]): Unit = { diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SQLSession.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SQLSession.scala index 3291304868..1a67e67842 100644 --- 
a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SQLSession.scala +++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SQLSession.scala @@ -146,7 +146,7 @@ object SQLSession extends Logging { ) } val taken = ByteTimeUtils.msDurationToString(System.currentTimeMillis - startTime) - logger.warn(s"Time taken: ${taken}, Fetched $index row(s).") + logger.info(s"Time taken: ${taken}, Fetched $index row(s).") // to register TempTable // Utils.tryAndErrorMsg(CSTableRegister.registerTempTable(engineExecutorContext, writer, alias, columns))("Failed to register tmp table:") engineExecutionContext.appendStdout( @@ -178,7 +178,7 @@ object SQLSession extends Logging { } .mkString("{", ",", "}") case (seq: Seq[_], ArrayType(typ, _)) => - seq.map(v => (v, typ)).map(toHiveStructString).mkString("[", ",", "]") + seq.map(v => (v, typ)).map(toHiveStructString).mkString("[\"", "\",\"", "\"]") case (map: Map[_, _], MapType(kType, vType, _)) => map .map { case (key, value) => @@ -188,7 +188,7 @@ object SQLSession extends Logging { .sorted .mkString("{", ",", "}") case (null, _) => "null" - case (str: String, StringType) => str.replaceAll("\n|\t", " ") + // case (str: String, StringType) => str.replaceAll("\n|\t", " ") case (double: Double, DoubleType) => nf.format(double) case (decimal: java.math.BigDecimal, DecimalType()) => formatDecimal(decimal) case (other: Any, tpe) => other.toString @@ -203,7 +203,7 @@ object SQLSession extends Logging { } .mkString("{", ",", "}") case (seq: Seq[_], ArrayType(typ, _)) => - seq.map(v => (v, typ)).map(toHiveStructString).mkString("[", ",", "]") + seq.map(v => (v, typ)).map(toHiveStructString).mkString("[\"", "\",\"", "\"]") case (map: Map[_, _], MapType(kType, vType, _)) => map .map { case (key, value) => @@ -213,8 +213,13 @@ object SQLSession extends Logging { .sorted .mkString("{", ",", "}") - case (str: String, StringType) => str.replaceAll("\n|\t", " ") - 
case (double: Double, DoubleType) => nf.format(double) + // case (str: String, StringType) => str.replaceAll("\n|\t", " ") + case (double: Double, DoubleType) => + if (double.isNaN) { + "NaN" + } else { + nf.format(double) + } case (decimal: java.math.BigDecimal, DecimalType()) => formatDecimal(decimal) case (other: Any, tpe) => other.toString case _ => null diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkEngineConnExecutor.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkEngineConnExecutor.scala index f2a40dd06b..47275e2ba9 100644 --- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkEngineConnExecutor.scala +++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkEngineConnExecutor.scala @@ -33,6 +33,7 @@ import org.apache.linkis.engineplugin.spark.extension.{ SparkPreExecutionHook } import org.apache.linkis.engineplugin.spark.utils.JobProgressUtil +import org.apache.linkis.governance.common.conf.GovernanceCommonConf import org.apache.linkis.governance.common.exception.LinkisJobRetryException import org.apache.linkis.governance.common.utils.JobUtils import org.apache.linkis.manager.common.entity.enumeration.NodeStatus @@ -90,9 +91,14 @@ abstract class SparkEngineConnExecutor(val sc: SparkContext, id: Long) } val kind: Kind = getKind var preCode = code - engineExecutorContext.appendStdout( - LogUtils.generateInfo(s"yarn application id: ${sc.applicationId}") - ) + + val isFirstParagraph = (engineExecutorContext.getCurrentParagraph == 1) + if (isFirstParagraph == true) { + engineExecutorContext.appendStdout( + LogUtils.generateInfo(s"yarn application id: ${sc.applicationId}") + ) + } + // Pre-execution hook var executionHook: SparkPreExecutionHook = null Utils.tryCatch { @@ -128,6 +134,37 @@ abstract class SparkEngineConnExecutor(val sc: SparkContext, 
id: Long) logger.info("Set jobGroup to " + jobGroup) sc.setJobGroup(jobGroup, _code, true) + // print job configuration, only the first paragraph + if (isFirstParagraph == true) { + Utils.tryCatch({ + val executorNum: Int = sc.getConf.get("spark.executor.instances").toInt + val executorMem: Long = + ByteTimeUtils.byteStringAsGb(sc.getConf.get("spark.executor.memory")) + val driverMem: Long = ByteTimeUtils.byteStringAsGb(sc.getConf.get("spark.driver.memory")) + val sparkExecutorCores = sc.getConf.get("spark.executor.cores", "2").toInt + val sparkDriverCores = sc.getConf.get("spark.driver.cores", "1").toInt + val queue = sc.getConf.get("spark.yarn.queue") + // with unit if set configuration with unit + // if not set sc get will get the value of spark.yarn.executor.memoryOverhead such as 512(without unit) + val memoryOverhead = sc.getConf.get("spark.executor.memoryOverhead", "1G") + + val sb = new StringBuilder + sb.append(s"spark.executor.instances=$executorNum\n") + sb.append(s"spark.executor.memory=${executorMem}G\n") + sb.append(s"spark.driver.memory=${driverMem}G\n") + sb.append(s"spark.executor.cores=$sparkExecutorCores\n") + sb.append(s"spark.driver.cores=$sparkDriverCores\n") + sb.append(s"spark.yarn.queue=$queue\n") + sb.append(s"spark.executor.memoryOverhead=${memoryOverhead}\n") + sb.append("\n") + engineExecutionContext.appendStdout( + LogUtils.generateInfo(s" Your spark job exec with configs:\n${sb.toString()}") + ) + })(t => { + logger.warn("Get actual used resource exception", t) + }) + } + val response = Utils.tryFinally(runCode(this, _code, engineExecutorContext, jobGroup)) { // Utils.tryAndWarn(this.engineExecutionContext.pushProgress(1, getProgressInfo(""))) jobGroup = null @@ -169,7 +206,8 @@ abstract class SparkEngineConnExecutor(val sc: SparkContext, id: Long) val newProgress = (engineExecutionContext.getCurrentParagraph * 1f - 1f) / engineExecutionContext.getTotalParagraph + JobProgressUtil .progress(sc, jobGroup) / 
engineExecutionContext.getTotalParagraph - val normalizedProgress = if (newProgress >= 1) newProgress - 0.1f else newProgress + val normalizedProgress = + if (newProgress >= 1) GovernanceCommonConf.FAKE_PROGRESS else newProgress val oldProgress = ProgressUtils.getOldProgress(this.engineExecutionContext) if (normalizedProgress < oldProgress) oldProgress else { diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkScalaExecutor.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkScalaExecutor.scala index 15907bac04..6635e149e8 100644 --- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkScalaExecutor.scala +++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/executor/SparkScalaExecutor.scala @@ -31,7 +31,7 @@ import org.apache.linkis.engineplugin.spark.exception.{ SparkSessionNullException } import org.apache.linkis.engineplugin.spark.utils.EngineUtils -import org.apache.linkis.governance.common.paser.ScalaCodeParser +import org.apache.linkis.governance.common.paser.{EmptyCodeParser, ScalaCodeParser} import org.apache.linkis.scheduler.executer.{ ErrorExecuteResponse, ExecuteResponse, diff --git a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/launch/SparkSubmitProcessEngineConnLaunchBuilder.scala b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/launch/SparkSubmitProcessEngineConnLaunchBuilder.scala index f25a1d55b0..ef222865fd 100644 --- a/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/launch/SparkSubmitProcessEngineConnLaunchBuilder.scala +++ b/linkis-engineconn-plugins/spark/src/main/scala/org/apache/linkis/engineplugin/spark/launch/SparkSubmitProcessEngineConnLaunchBuilder.scala @@ -18,6 +18,7 @@ package org.apache.linkis.engineplugin.spark.launch import 
org.apache.linkis.common.conf.CommonVars +import org.apache.linkis.common.utils.Logging import org.apache.linkis.engineplugin.spark.config.SparkConfiguration.{ ENGINE_JAR, SPARK_APP_NAME, @@ -42,7 +43,8 @@ import org.apache.commons.lang3.StringUtils import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer -class SparkSubmitProcessEngineConnLaunchBuilder(builder: JavaProcessEngineConnLaunchBuilder) { +class SparkSubmitProcessEngineConnLaunchBuilder(builder: JavaProcessEngineConnLaunchBuilder) + extends Logging { def getCommands( engineConnBuildRequest: EngineConnBuildRequest, @@ -67,7 +69,13 @@ class SparkSubmitProcessEngineConnLaunchBuilder(builder: JavaProcessEngineConnLa jars ++= getValueAndRemove(properties, SPARK_DEFAULT_EXTERNAL_JARS_PATH) .split(",") .filter(x => { - isNotBlankPath(x) && (new java.io.File(x)).isFile + val isPath = isNotBlankPath(x) + // filter by isFile cannot support this case: + // The cg-linkismanager startup user is inconsistent with the engineconn startup user + + // val isFile = (new java.io.File(x)).isFile + logger.info(s"file:${x}, check isPath:${isPath}") + isPath }) val pyFiles = getValueAndRemove(properties, "py-files", "").split(",").filter(isNotBlankPath) val archives = getValueAndRemove(properties, "archives", "").split(",").filter(isNotBlankPath) diff --git a/linkis-engineconn-plugins/sqoop/pom.xml b/linkis-engineconn-plugins/sqoop/pom.xml index 5428047fe6..8435ca1705 100644 --- a/linkis-engineconn-plugins/sqoop/pom.xml +++ b/linkis-engineconn-plugins/sqoop/pom.xml @@ -31,6 +31,24 @@ + + + com.lmax + disruptor + 3.4.0 + + + + org.apache.linkis + linkis-datasource-client + ${project.version} + + + org.apache.linkis + linkis-common + + + org.apache.commons commons-exec @@ -45,13 +63,7 @@ org.apache.avro avro - 1.10.2 - provided - - - com.lmax - disruptor - 3.4.3 + 1.11.0 provided diff --git 
a/linkis-engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/config/ParamsMapping.java b/linkis-engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/config/ParamsMapping.java index e9cc6c6807..d0c0f29303 100644 --- a/linkis-engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/config/ParamsMapping.java +++ b/linkis-engineconn-plugins/sqoop/src/main/java/org/apache/linkis/engineconnplugin/sqoop/client/config/ParamsMapping.java @@ -29,6 +29,12 @@ public final class ParamsMapping { static { String paramPrefix = SqoopParamsConfiguration.SQOOP_PARAM_PREFIX().getValue(); mapping = new HashMap<>(); + // start of DPM configuration + mapping.put(paramPrefix + "objectid", "--objectid"); + mapping.put(paramPrefix + "appid", "--appid"); + mapping.put(paramPrefix + "dk", "--dk"); + mapping.put(paramPrefix + "timestamp", "--timestamp"); + // end of DPM configuration mapping.put(paramPrefix + "connect", "--connect"); mapping.put(paramPrefix + "connection.manager", "--connection-manager"); mapping.put(paramPrefix + "connection.param.file", "--connection-param-file"); diff --git a/linkis-engineconn-plugins/sqoop/src/main/resources/log4j2.xml b/linkis-engineconn-plugins/sqoop/src/main/resources/log4j2.xml index 9fd15a4b48..ac657912f8 100644 --- a/linkis-engineconn-plugins/sqoop/src/main/resources/log4j2.xml +++ b/linkis-engineconn-plugins/sqoop/src/main/resources/log4j2.xml @@ -18,31 +18,36 @@ - - - - + + + + + + + + - + - + - + - + - + diff --git a/linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/client/RemoteClientHolder.scala b/linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/client/RemoteClientHolder.scala new file mode 100644 index 0000000000..ddcf0b866e --- /dev/null +++ b/linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/client/RemoteClientHolder.scala @@ -0,0 
+1,97 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.sqoop.client + +import org.apache.linkis.common.conf.{CommonVars, Configuration} +import org.apache.linkis.common.utils.{JsonUtils, Logging, Utils} +import org.apache.linkis.datasource.client.{AbstractRemoteClient, DataSourceRemoteClient} +import org.apache.linkis.datasource.client.impl.LinkisDataSourceRemoteClient +import org.apache.linkis.engineconnplugin.sqoop.exception.DataSourceRpcErrorException +import org.apache.linkis.httpclient.dws.authentication.TokenAuthenticationStrategy +import org.apache.linkis.httpclient.dws.config.{DWSClientConfig, DWSClientConfigBuilder} +import org.apache.linkis.httpclient.request.{Action, UserAction} + +import java.util.concurrent.TimeUnit + +/** + * To hold client for Linkis data source + */ +class RemoteClientHolder(user: String, system: String, clientConfig: DWSClientConfig) + extends Logging { + + def this(user: String, system: String) = { + this(user, system, RemoteClientHolder.CONFIG) + } + + def getDataSourceClient: DataSourceRemoteClient = { + Client.DATASOURCE + } + + def executeDataSource[T](action: Action): T = { + Utils.tryCatch { + // Try to invoke "setSystem" method 
+ Utils.tryAndWarn { + Option(action.getClass.getMethod("setSystem", classOf[String])).foreach(method => { + method.setAccessible(true) + method.invoke(action, system) + }) + } + action match { + case action1: UserAction => + action1.setUser(user) + case _ => + } + getDataSourceClient.asInstanceOf[AbstractRemoteClient].execute(action).asInstanceOf[T] + } { case e: Exception => + throw new DataSourceRpcErrorException( + s"Fail to invoke action: " + + s"${JsonUtils.jackson.writer().writeValueAsString(action)}", + e + ) + } + } + + private object Client { + lazy val DATASOURCE: DataSourceRemoteClient = new LinkisDataSourceRemoteClient(clientConfig) + + } + +} + +object RemoteClientHolder { + + /** + * Default client configuration + */ + val CONFIG: DWSClientConfig = DWSClientConfigBuilder + .newBuilder() + .addServerUrl(Configuration.getGateWayURL()) + .connectionTimeout(CommonVars[Long]("", 12).getValue) + .discoveryEnabled(CommonVars[Boolean]("", false).getValue) + .discoveryFrequency(CommonVars[Long]("", 1).getValue, TimeUnit.MINUTES) + .loadbalancerEnabled(CommonVars[Boolean]("", false).getValue) + .maxConnectionSize(CommonVars[Int]("", 1).getValue) + .retryEnabled(CommonVars[Boolean]("", true).getValue) + .readTimeout(CommonVars[Long]("", 12).getValue) + .setAuthenticationStrategy(new TokenAuthenticationStrategy()) + .setAuthTokenKey(CommonVars[String]("", "").getValue) + .setAuthTokenValue(CommonVars[String]("", "").getValue) + .setDWSVersion(Configuration.LINKIS_WEB_VERSION.getValue) + .build() + +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/TerminateToken.java b/linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/exception/DataSourceRpcErrorException.scala similarity index 69% rename from 
linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/TerminateToken.java rename to linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/exception/DataSourceRpcErrorException.scala index e8d81a430d..be091b841b 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/TerminateToken.java +++ b/linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/exception/DataSourceRpcErrorException.scala @@ -15,18 +15,19 @@ * limitations under the License. */ -package org.apache.linkis.cli.core.interactor.job; +package org.apache.linkis.engineconnplugin.sqoop.exception -import java.util.concurrent.atomic.AtomicBoolean; +import org.apache.linkis.common.exception.ErrorException -public class TerminateToken { - private AtomicBoolean shouldTerminate = new AtomicBoolean(false); +/** + * RpcError Exception + */ +class DataSourceRpcErrorException(errCode: Int, desc: String) + extends ErrorException(errCode: Int, desc: String) { - public boolean shouldTerminate() { - return shouldTerminate.get(); + def this(errorMsg: String, t: Throwable) = { + this(16024, errorMsg) + initCause(t) } - public void setTerminate() { - shouldTerminate.set(true); - } } diff --git a/linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceCodeExecutor.scala b/linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceCodeExecutor.scala index 9b09d6b613..37096dcda6 100644 --- a/linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceCodeExecutor.scala +++ b/linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/executor/SqoopOnceCodeExecutor.scala @@ -36,7 +36,11 @@ import 
org.apache.linkis.engineconnplugin.sqoop.context.SqoopResourceConfigurati LINKIS_SQOOP_TASK_MAP_CPU_CORES, LINKIS_SQOOP_TASK_MAP_MEMORY } -import org.apache.linkis.engineconnplugin.sqoop.params.SqoopParamsResolver +import org.apache.linkis.engineconnplugin.sqoop.params.{ + ConnectParamsResolver, + SqoopDataSourceParamsResolver, + SqoopParamsResolver +} import org.apache.linkis.manager.common.entity.resource._ import org.apache.linkis.manager.engineplugin.common.conf.EngineConnPluginConf import org.apache.linkis.protocol.engine.JobProgressInfo @@ -55,7 +59,9 @@ class SqoopOnceCodeExecutor( private var params: util.Map[String, String] = _ private var future: Future[_] = _ private var daemonThread: Future[_] = _ - private val paramsResolvers: Array[SqoopParamsResolver] = Array() + + private val paramsResolvers: Array[SqoopParamsResolver] = + Array(new SqoopDataSourceParamsResolver, new ConnectParamsResolver) override def doSubmit( onceExecutorExecutionContext: OnceExecutorExecutionContext, diff --git a/linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/params/ConnectParamsResolver.scala b/linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/params/ConnectParamsResolver.scala new file mode 100644 index 0000000000..881bc3c098 --- /dev/null +++ b/linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/params/ConnectParamsResolver.scala @@ -0,0 +1,82 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.engineconnplugin.sqoop.params + +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.datasourcemanager.common.util.json.Json +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconnplugin.sqoop.context.SqoopParamsConfiguration + +import org.apache.commons.lang3.StringUtils + +import java.util + +import scala.collection.JavaConverters._ + +/** + * Connect param + */ +class ConnectParamsResolver extends SqoopParamsResolver with Logging { + + /** + * main method build connect from host,port,and for example: + * --connect "jdbc:mysql://{host}:{port}/xxx?{params}" + * @param params + * input + * @return + */ + override def resolve( + params: util.Map[String, String], + context: EngineCreationContext + ): util.Map[String, String] = { + info(s"Invoke resolver: ${this.getClass.getSimpleName}") + Option(params.get(SqoopParamsConfiguration.SQOOP_PARAM_CONNECT.getValue)).foreach(connect => { + val newConnectStr = params.asScala.foldLeft(connect) { + case (newConnect, kv) => { + var paramKey = kv._1 + var paramValue = kv._2 + if (Option(paramKey).isDefined && Option(paramValue).isDefined) { + if (paramKey.equals(SqoopParamsConfiguration.SQOOP_PARAM_CONNECT_PARAMS.getValue)) { + Utils.tryQuietly { + val connectMap: util.Map[String, String] = Json.fromJson( + paramValue, + classOf[util.Map[String, String]], + classOf[String], + classOf[String] + ) + paramValue = connectMap.asScala.foldLeft("") { case (connectStr, mapItem) => + val item = 
s"${mapItem._1}=${mapItem._2}" + if (StringUtils.isNotBlank(connectStr)) connectStr + "&" + item else item + } + } + } + if (paramKey.startsWith(SqoopParamsConfiguration.SQOOP_PARAM_PREFIX.getValue)) { + paramKey = + paramKey.substring(SqoopParamsConfiguration.SQOOP_PARAM_PREFIX.getValue.length) + } + newConnect.replace(s"{$paramKey}", paramValue) + } else newConnect + } + } + info(s"connect string => $newConnectStr") + params.put(SqoopParamsConfiguration.SQOOP_PARAM_CONNECT.getValue, newConnectStr) + }) + params + } + +} diff --git a/linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/params/SqoopDataSourceParamsResolver.scala b/linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/params/SqoopDataSourceParamsResolver.scala new file mode 100644 index 0000000000..6e52fda04d --- /dev/null +++ b/linkis-engineconn-plugins/sqoop/src/main/scala/org/apache/linkis/engineconnplugin/sqoop/params/SqoopDataSourceParamsResolver.scala @@ -0,0 +1,143 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.engineconnplugin.sqoop.params + +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.datasource.client.DataSourceRemoteClient +import org.apache.linkis.datasource.client.request.GetConnectParamsByDataSourceNameAction +import org.apache.linkis.datasource.client.response.GetConnectParamsByDataSourceNameResult +import org.apache.linkis.datasourcemanager.common.protocol.{DsInfoQueryRequest, DsInfoResponse} +import org.apache.linkis.engineconn.common.creation.EngineCreationContext +import org.apache.linkis.engineconnplugin.sqoop.client.RemoteClientHolder +import org.apache.linkis.engineconnplugin.sqoop.context.{ + SqoopEnvConfiguration, + SqoopParamsConfiguration +} +import org.apache.linkis.engineconnplugin.sqoop.exception.DataSourceRpcErrorException +import org.apache.linkis.rpc.Sender + +import org.apache.commons.lang3.StringUtils + +import java.util + +import scala.collection.JavaConverters._ + +/** + * connect the data source manager instance + */ +class SqoopDataSourceParamsResolver extends SqoopParamsResolver with Logging { + + /** + * main method fetch the connect params from datasource and then set into map + * @param params + * input + * @return + */ + override def resolve( + params: util.Map[String, String], + context: EngineCreationContext + ): util.Map[String, String] = { + info(s"Invoke resolver: ${this.getClass.getSimpleName}") + Option(params.get(SqoopParamsConfiguration.SQOOP_PARAM_DATA_SOURCE.getValue)).foreach( + dataSourceName => { + Option(rpcCall(dataSourceName)).foreach(response => { + response.params.asScala.foreach { + case (paramKey, paramValue) => + params.put( + s"${SqoopParamsConfiguration.SQOOP_PARAM_PREFIX.getValue}$paramKey", + String.valueOf(Option(paramValue).getOrElse("")) + ) + case _ => + } + if (response.params.isEmpty) { + warn( + s" Note: params from data source [$dataSourceName] is empty, have you published it ?" 
+ ) + } + info(s"Fetch ${response.params.size} params from data source [${dataSourceName}]") + }) + } + ) + params + } + + def clientCall(dataSource: String, user: String): util.Map[String, Any] = { + val clientHolder = new RemoteClientHolder(user, "sqoop") + val client: DataSourceRemoteClient = clientHolder.getDataSourceClient + Utils.tryFinally { + var result: GetConnectParamsByDataSourceNameResult = null + try { + // Fetch the connect params by data source and username + val action = GetConnectParamsByDataSourceNameAction + .builder() + .setDataSourceName(dataSource) + .setSystem("sqoop") + .setUser(user) + .build() + result = clientHolder.executeDataSource(action) + } catch { + case e: Exception => + throw new DataSourceRpcErrorException( + "Unable to access to the data source server in client", + e + ) + } + result + } { client.close() } match { + case result: GetConnectParamsByDataSourceNameResult => result.connectParams + case _ => + throw new DataSourceRpcErrorException( + "Empty response from data source server in client", + null + ) + } + } + + def rpcCall(dataSourceName: String): DsInfoResponse = { + val sender = Sender.getSender(SqoopEnvConfiguration.LINKIS_DATASOURCE_SERVICE_NAME.getValue) + var rpcResult: Any = null + try { + rpcResult = sender.ask(new DsInfoQueryRequest(null, dataSourceName, "sqoop")) + } catch { + case e: Exception => + throw new DataSourceRpcErrorException( + s"Send rpc to data source service [${SqoopEnvConfiguration.LINKIS_DATASOURCE_SERVICE_NAME.getValue}] error", + e + ) + } + rpcResult match { + case response: DsInfoResponse => + if (!response.status) { + throw new DataSourceRpcErrorException( + "Exception happened in data source manager server, please check the log in instance", + null + ) + } + val dsType = response.dsType + if (StringUtils.isBlank(dsType)) { + throw new DataSourceRpcErrorException( + s"Data source type cannot be null for [$dataSourceName], creator:[${response.creator}]", + null + ) + } + response + case _ => 
null + } + } + +} diff --git a/linkis-engineconn-plugins/trino/src/main/resources/log4j2.xml b/linkis-engineconn-plugins/trino/src/main/resources/log4j2.xml index 9cf70f16c1..8c3997f3f2 100644 --- a/linkis-engineconn-plugins/trino/src/main/resources/log4j2.xml +++ b/linkis-engineconn-plugins/trino/src/main/resources/log4j2.xml @@ -20,16 +20,16 @@ - + - + - + diff --git a/linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/conf/TrinoConfiguration.scala b/linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/conf/TrinoConfiguration.scala index e5c6f5186a..cfeb943f01 100644 --- a/linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/conf/TrinoConfiguration.scala +++ b/linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/conf/TrinoConfiguration.scala @@ -57,6 +57,8 @@ object TrinoConfiguration { val TRINO_FORBID_GRANT = CommonVars[Boolean]("linkis.trino.forbid.grant", true) + val TRINO_FORBID_FETCHFIRST = CommonVars[Boolean]("linkis.trino.forbid.fetchfirst", true) + val TRINO_FORBID_MODIFY_SCHEMA = CommonVars[Boolean]("linkis.trino.forbid.modifySchema", true) diff --git a/linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/executor/TrinoEngineConnExecutor.scala b/linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/executor/TrinoEngineConnExecutor.scala index a3a0d0d900..1e3fc25ede 100644 --- a/linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/executor/TrinoEngineConnExecutor.scala +++ b/linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/executor/TrinoEngineConnExecutor.scala @@ -19,12 +19,14 @@ package org.apache.linkis.engineplugin.trino.executor import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{OverloadUtils, Utils} +import 
org.apache.linkis.engineconn.acessible.executor.listener.event.TaskLogUpdateEvent import org.apache.linkis.engineconn.common.conf.{EngineConnConf, EngineConnConstant} import org.apache.linkis.engineconn.computation.executor.execute.{ ConcurrentComputationExecutor, EngineExecutionContext } import org.apache.linkis.engineconn.core.EngineConnObject +import org.apache.linkis.engineconn.executor.listener.ExecutorListenerBusContext import org.apache.linkis.engineplugin.trino.conf.TrinoConfiguration._ import org.apache.linkis.engineplugin.trino.conf.TrinoEngineConfig import org.apache.linkis.engineplugin.trino.exception.{ @@ -400,14 +402,21 @@ class TrinoEngineConnExecutor(override val outputPrintLimit: Int, val id: Int) } else { results = statement.finalStatusInfo() } + // v407中create table时columns返回为null if (results.getColumns == null) { - throw new RuntimeException("trino columns is null.") + // throw new RuntimeException("trino columns is null.") + logger.info(s"results columns is null for task: $taskId") + return } + val columns = results.getColumns.asScala .map(column => Column(column.getName, column.getType, "")) .toArray[Column] + // 兼容结果集中colums为空集合的情况,如use db, drop table if exists + if (columns.length != 0) { + resultSetWriter.addMetaData(new TableMetaData(columns)) + } columnCount = columns.length - resultSetWriter.addMetaData(new TableMetaData(columns)) while (statement.isRunning) { val data = statement.currentData().getData if (data != null) for (row <- data.asScala) { @@ -527,6 +536,19 @@ class TrinoEngineConnExecutor(override val outputPrintLimit: Int, val id: Int) } override def close(): Unit = { + val taskIds = statementClientCache.keySet().iterator() + val lbs = ExecutorListenerBusContext.getExecutorListenerBusContext() + while (taskIds.hasNext) { + val taskId = taskIds.next() + Utils.tryAndWarn( + lbs.getEngineConnSyncListenerBus.postToAll( + TaskLogUpdateEvent( + taskId, + LogUtils.generateERROR("EC exits unexpectedly and actively kills the task") + ) 
+ ) + ) + } killAll() super.close() } diff --git a/linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/utils/TrinoCode.scala b/linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/utils/TrinoCode.scala index 6329969259..42c9bcf461 100644 --- a/linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/utils/TrinoCode.scala +++ b/linkis-engineconn-plugins/trino/src/main/scala/org/apache/linkis/engineplugin/trino/utils/TrinoCode.scala @@ -19,6 +19,7 @@ package org.apache.linkis.engineplugin.trino.utils import org.apache.linkis.engineplugin.trino.conf.TrinoConfiguration import org.apache.linkis.engineplugin.trino.exception.{ + TrinoClientException, TrinoGrantmaException, TrinoModifySchemaException } @@ -39,6 +40,14 @@ object TrinoCode { .contains("alter schema") } + private def hasFetchFirst(code: String): Boolean = { + val trimmedCode = code + .replaceAll("\n", SPACE) + .replaceAll("\\s+", SPACE) + .toLowerCase() + trimmedCode.matches(".*fetch first (\\d+ )?row only.*") + } + def checkCode(code: String): Unit = { if ( TrinoConfiguration.TRINO_FORBID_MODIFY_SCHEMA.getValue && TrinoCode.willModifySchema(code) @@ -48,6 +57,9 @@ object TrinoCode { if (TrinoConfiguration.TRINO_FORBID_GRANT.getValue && TrinoCode.willGrant(code)) { throw TrinoGrantmaException("Grant schema or table is not allowed") } + if (TrinoConfiguration.TRINO_FORBID_FETCHFIRST.getValue && TrinoCode.hasFetchFirst(code)) { + throw TrinoClientException("Fetch first row only is not allowed, please use limit n.") + } } } diff --git a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/computation/catalyst/planner/TaskPlannerTransform.scala b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/computation/catalyst/planner/TaskPlannerTransform.scala index f5fc6d5500..ab404dba6f 100644 --- 
a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/computation/catalyst/planner/TaskPlannerTransform.scala +++ b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/computation/catalyst/planner/TaskPlannerTransform.scala @@ -34,6 +34,7 @@ import scala.collection.mutable.ArrayBuffer */ class TaskPlannerTransform extends PlannerTransform with Logging { + @Deprecated def rebuildTreeNode(tmpTask: Task): Task = { tmpTask.getChildren.foreach(child => { val newParents = child.getParents.clone() :+ tmpTask @@ -42,6 +43,7 @@ class TaskPlannerTransform extends PlannerTransform with Logging { tmpTask } + @Deprecated def buildCodeLogicTaskTree( codeLogicalUnit: CodeLogicalUnit = null, stage: Stage, @@ -54,6 +56,7 @@ class TaskPlannerTransform extends PlannerTransform with Logging { (rebuildTreeNode(codeLogicalUnitTaskTmp), newStartJobTask) } + @Deprecated def buildStageTaskTree(taskDesc: StageTaskDesc, startJobTask: Task = null): (Task, Task) = { taskDesc match { case endStageTask: EndStageTaskDesc => @@ -102,6 +105,7 @@ class TaskPlannerTransform extends PlannerTransform with Logging { } } + @Deprecated def buildAllStageTaskTree( stages: Array[Stage], startJobTask: Task = null @@ -117,6 +121,7 @@ class TaskPlannerTransform extends PlannerTransform with Logging { (stageTasks.toArray, reusedStartJobTask) } + @Deprecated def buildJobTaskTree(taskDesc: TaskDesc): Task = { taskDesc match { case startTask: StartJobTaskDesc => @@ -136,18 +141,29 @@ class TaskPlannerTransform extends PlannerTransform with Logging { override def apply(in: Job, context: ASTContext): Task = { in match { case job: CodeJob => - // TODO rebuild needed: Notice( Stages maybe have dependency relation.) - // TODO This class should be split into two kind of transforms. - // TODO First, two PlannerTransforms are needed: one to transform Job to JobTaskEnd, one to transform Job to StageTaskEnd. 
- // TODO Second, AnalyzeTransforms are needed: one for adding a computationTask by stage for no computation strategy, - // one to transform Job to JobTaskStart, one to transform Job to StageTaskStart. - buildJobTaskTree(EndJobTaskDesc(job)) + val taskDesc = EndJobTaskDesc(job) + val jobTaskTmp = + new JobTask(Array(), Array(buildCodeLogicTaskTree(job.getCodeLogicalUnit, job))) + jobTaskTmp.setTaskDesc(taskDesc) + rebuildNewTreeNode(jobTaskTmp) case _ => logger.error(s"unknown job type:${in.getClass} ") null } } + def rebuildNewTreeNode(tmpTask: Task): Task = { + tmpTask.getChildren.foreach(_.withNewParents(Array(tmpTask))) + tmpTask + } + + def buildCodeLogicTaskTree(codeLogicalUnit: CodeLogicalUnit, job: Job): Task = { + val codeLogicalUnitTaskTmp = new CodeLogicalUnitTask(Array(), Array()) + codeLogicalUnitTaskTmp.setTaskDesc(CodeLogicalUnitTaskDesc(job)) + if (codeLogicalUnit != null) codeLogicalUnitTaskTmp.setCodeLogicalUnit(codeLogicalUnit) + codeLogicalUnitTaskTmp + } + override def getName: String = { val className = getClass.getName if (className endsWith "$") className.dropRight(1) else className diff --git a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/computation/execute/DefaultCodeExecTaskExecutorManager.scala b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/computation/execute/DefaultCodeExecTaskExecutorManager.scala index 8f95172fbd..af75d5c56f 100644 --- a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/computation/execute/DefaultCodeExecTaskExecutorManager.scala +++ b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/computation/execute/DefaultCodeExecTaskExecutorManager.scala @@ -132,7 +132,7 @@ class DefaultCodeExecTaskExecutorManager extends CodeExecTaskExecutorManager wit .getIDInfo()} mark id is ${mark.getMarkId()}, it may take several seconds, 
please wait") ) ) - val engineConnExecutor = engineConnManager.getAvailableEngineConnExecutor(mark) + val engineConnExecutor = engineConnManager.getAvailableEngineConnExecutor(mark, execTask) if (null == engineConnExecutor) { return null } diff --git a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/computation/service/ComputationTaskExecutionReceiver.scala b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/computation/service/ComputationTaskExecutionReceiver.scala index 521205927a..101914724c 100644 --- a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/computation/service/ComputationTaskExecutionReceiver.scala +++ b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/computation/service/ComputationTaskExecutionReceiver.scala @@ -17,8 +17,7 @@ package org.apache.linkis.orchestrator.computation.service -import org.apache.linkis.common.log.LogUtils -import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.common.utils.Logging import org.apache.linkis.governance.common.entity.ExecutionNodeStatus import org.apache.linkis.governance.common.protocol.task._ import org.apache.linkis.manager.common.protocol.resource.ResponseTaskRunningInfo @@ -28,6 +27,7 @@ import org.apache.linkis.orchestrator.computation.monitor.EngineConnMonitor import org.apache.linkis.orchestrator.core.ResultSet import org.apache.linkis.orchestrator.ecm.service.TaskExecutionReceiver import org.apache.linkis.orchestrator.listener.task._ +import org.apache.linkis.orchestrator.utils.OrchestratorLoggerUtils import org.apache.linkis.rpc.Sender import org.apache.linkis.rpc.message.annotation.Receiver import org.apache.linkis.rpc.utils.RPCUtils @@ -91,6 +91,7 @@ class ComputationTaskExecutionReceiver extends TaskExecutionReceiver with Loggin codeExecTaskExecutorManager 
.getByEngineConnAndTaskId(serviceInstance, taskStatus.execId) .foreach { codeExecutor => + OrchestratorLoggerUtils.setJobIdMDC(codeExecutor.getExecTask) val event = TaskStatusEvent(codeExecutor.getExecTask, taskStatus.status) logger.info( s"From engineConn receive status info:$taskStatus, now post to listenerBus event: $event" @@ -111,6 +112,7 @@ class ComputationTaskExecutionReceiver extends TaskExecutionReceiver with Loggin ) } } + OrchestratorLoggerUtils.removeJobIdMDC() } @Receiver @@ -123,6 +125,7 @@ class ComputationTaskExecutionReceiver extends TaskExecutionReceiver with Loggin codeExecTaskExecutorManager .getByEngineConnAndTaskId(serviceInstance, taskResultSize.execId) .foreach { codeExecutor => + OrchestratorLoggerUtils.setJobIdMDC(codeExecutor.getExecTask) val event = TaskResultSetSizeEvent(codeExecutor.getExecTask, taskResultSize.resultSize) logger.info( s"From engineConn receive resultSet size info$taskResultSize, now post to listenerBus event: $event" @@ -134,6 +137,7 @@ class ComputationTaskExecutionReceiver extends TaskExecutionReceiver with Loggin if (!isExist) { logger.warn(s"from $serviceInstance received $taskResultSize cannot find execTask to deal") } + OrchestratorLoggerUtils.removeJobIdMDC() } @Receiver @@ -143,6 +147,7 @@ class ComputationTaskExecutionReceiver extends TaskExecutionReceiver with Loggin codeExecTaskExecutorManager .getByEngineConnAndTaskId(serviceInstance, taskResultSet.execId) .foreach { codeExecutor => + OrchestratorLoggerUtils.setJobIdMDC(codeExecutor.getExecTask) val event = TaskResultSetEvent( codeExecutor.getExecTask, ResultSet(taskResultSet.output, taskResultSet.alias) @@ -157,6 +162,7 @@ class ComputationTaskExecutionReceiver extends TaskExecutionReceiver with Loggin if (!isExist) { logger.warn(s"from $serviceInstance received $taskResultSet cannot find execTask to deal") } + OrchestratorLoggerUtils.removeJobIdMDC() } @Receiver @@ -166,6 +172,7 @@ class ComputationTaskExecutionReceiver extends TaskExecutionReceiver with 
Loggin codeExecTaskExecutorManager .getByEngineConnAndTaskId(serviceInstance, responseTaskError.execId) .foreach { codeExecutor => + OrchestratorLoggerUtils.setJobIdMDC(codeExecutor.getExecTask) val event = TaskErrorResponseEvent(codeExecutor.getExecTask, responseTaskError.errorMsg) logger.info( s"From engineConn receive responseTaskError info${responseTaskError.execId}, now post to listenerBus event: ${event.execTask.getIDInfo()}" @@ -177,7 +184,7 @@ class ComputationTaskExecutionReceiver extends TaskExecutionReceiver with Loggin if (!isExist) { logger.warn(s"from $serviceInstance received $responseTaskError cannot find execTask to deal") } - + OrchestratorLoggerUtils.removeJobIdMDC() } } diff --git a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/ecm/ComputationEngineConnManager.scala b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/ecm/ComputationEngineConnManager.scala index a4f4099178..9f2d424c9d 100644 --- a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/ecm/ComputationEngineConnManager.scala +++ b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/ecm/ComputationEngineConnManager.scala @@ -19,6 +19,7 @@ package org.apache.linkis.orchestrator.ecm import org.apache.linkis.common.ServiceInstance import org.apache.linkis.common.exception.LinkisRetryException +import org.apache.linkis.common.log.LogUtils import org.apache.linkis.common.utils.{ByteTimeUtils, Logging, Utils} import org.apache.linkis.governance.common.conf.GovernanceCommonConf import org.apache.linkis.manager.common.entity.node.EngineNode @@ -29,6 +30,7 @@ import org.apache.linkis.manager.common.protocol.engine.{ EngineCreateSuccess } import org.apache.linkis.manager.label.constant.LabelKeyConstant +import org.apache.linkis.orchestrator.computation.physical.CodeLogicalUnitExecTask import 
org.apache.linkis.orchestrator.ecm.cache.EngineAsyncResponseCache import org.apache.linkis.orchestrator.ecm.conf.ECMPluginConf import org.apache.linkis.orchestrator.ecm.entity.{DefaultMark, Mark, MarkReq, Policy} @@ -38,6 +40,7 @@ import org.apache.linkis.orchestrator.ecm.service.impl.{ ComputationConcurrentEngineConnExecutor, ComputationEngineConnExecutor } +import org.apache.linkis.orchestrator.listener.task.TaskLogEvent import org.apache.linkis.rpc.Sender import org.apache.commons.lang3.exception.ExceptionUtils @@ -77,7 +80,8 @@ class ComputationEngineConnManager extends AbstractEngineConnManager with Loggin override protected def askEngineConnExecutor( engineAskRequest: EngineAskRequest, - mark: Mark + mark: Mark, + execTask: CodeLogicalUnitExecTask ): EngineConnExecutor = { engineAskRequest.setTimeOut(getEngineConnApplyTime) var count = getEngineConnApplyAttempts() @@ -86,7 +90,8 @@ class ComputationEngineConnManager extends AbstractEngineConnManager with Loggin count = count - 1 val start = System.currentTimeMillis() try { - val (engineNode, reuse) = getEngineNodeAskManager(engineAskRequest, mark) + val (engineNode, reuse) = + getEngineNodeAskManager(engineAskRequest, mark, execTask) if (null != engineNode) { val engineConnExecutor = if ( @@ -110,6 +115,7 @@ class ComputationEngineConnManager extends AbstractEngineConnManager with Loggin s"${mark.getMarkId()} Failed to askEngineAskRequest time taken ($taken), ${t.getMessage}" ) retryException = t + // add isCrossClusterRetryException flag case t: Throwable => val taken = ByteTimeUtils.msDurationToString(System.currentTimeMillis - start) logger.warn(s"${mark.getMarkId()} Failed to askEngineAskRequest time taken ($taken)") @@ -128,7 +134,8 @@ class ComputationEngineConnManager extends AbstractEngineConnManager with Loggin private def getEngineNodeAskManager( engineAskRequest: EngineAskRequest, - mark: Mark + mark: Mark, + execTask: CodeLogicalUnitExecTask ): (EngineNode, Boolean) = { val response = 
Utils.tryCatch(getManagerSender().ask(engineAskRequest)) { t: Throwable => val baseMsg = s"mark ${mark.getMarkId()} failed to ask linkis Manager Can be retried " @@ -143,6 +150,7 @@ class ComputationEngineConnManager extends AbstractEngineConnManager with Loggin throw t } } + response match { case engineNode: EngineNode => logger.debug(s"Succeed to reuse engineNode $engineNode mark ${mark.getMarkId()}") @@ -152,6 +160,9 @@ class ComputationEngineConnManager extends AbstractEngineConnManager with Loggin "{} received EngineAskAsyncResponse id: {} serviceInstance: {}", Array(mark.getMarkId(), id, serviceInstance): _* ) + execTask.getPhysicalContext.pushLog( + TaskLogEvent(execTask, LogUtils.generateInfo(s"Request LinkisManager:${response}")) + ) cacheMap.getAndRemove( id, Duration(engineAskRequest.getTimeOut + 100000, TimeUnit.MILLISECONDS) diff --git a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/ecm/EngineConnManager.scala b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/ecm/EngineConnManager.scala index de996a3532..416d1363ee 100644 --- a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/ecm/EngineConnManager.scala +++ b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/ecm/EngineConnManager.scala @@ -21,6 +21,7 @@ import org.apache.linkis.common.ServiceInstance import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.manager.common.protocol.engine.EngineAskRequest import org.apache.linkis.manager.label.constant.LabelKeyConstant +import org.apache.linkis.orchestrator.computation.physical.CodeLogicalUnitExecTask import org.apache.linkis.orchestrator.ecm.conf.ECMPluginConf import org.apache.linkis.orchestrator.ecm.entity.{Mark, MarkReq, Policy} import org.apache.linkis.orchestrator.ecm.exception.ECMPluginErrorException @@ -59,7 +60,10 @@ trait 
EngineConnManager { * @param mark * @return */ - def getAvailableEngineConnExecutor(mark: Mark): EngineConnExecutor + def getAvailableEngineConnExecutor( + mark: Mark, + execTask: CodeLogicalUnitExecTask + ): EngineConnExecutor /** * Remove the engineConn related to the Mark Release lock and other information @@ -121,7 +125,10 @@ abstract class AbstractEngineConnManager extends EngineConnManager with Logging override def getMarkCache(): util.Map[Mark, util.List[ServiceInstance]] = markCache - override def getAvailableEngineConnExecutor(mark: Mark): EngineConnExecutor = { + override def getAvailableEngineConnExecutor( + mark: Mark, + execTask: CodeLogicalUnitExecTask + ): EngineConnExecutor = { logger.info(s"mark ${mark.getMarkId()} start to getAvailableEngineConnExecutor") if (null != mark) { tryReuseEngineConnExecutor(mark) match { @@ -129,7 +136,7 @@ abstract class AbstractEngineConnManager extends EngineConnManager with Logging case None => } val engineConnExecutor = - askEngineConnExecutor(mark.getMarkReq.createEngineConnAskReq(), mark) + askEngineConnExecutor(mark.getMarkReq.createEngineConnAskReq(), mark, execTask) engineConnExecutor.useEngineConn saveToMarkCache(mark, engineConnExecutor) logger.debug( @@ -233,7 +240,8 @@ abstract class AbstractEngineConnManager extends EngineConnManager with Logging protected def askEngineConnExecutor( engineAskRequest: EngineAskRequest, - mark: Mark + mark: Mark, + execTask: CodeLogicalUnitExecTask ): EngineConnExecutor override def releaseMark(mark: Mark): Unit = { diff --git a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/ecm/LoadBalanceLabelEngineConnManager.scala b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/ecm/LoadBalanceLabelEngineConnManager.scala index 622c8813b5..d36b548cd2 100644 --- 
a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/ecm/LoadBalanceLabelEngineConnManager.scala +++ b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/org/apache/linkis/orchestrator/ecm/LoadBalanceLabelEngineConnManager.scala @@ -22,6 +22,7 @@ import org.apache.linkis.common.utils.Logging import org.apache.linkis.manager.label.constant.LabelKeyConstant import org.apache.linkis.manager.label.entity.engine.ReuseExclusionLabel import org.apache.linkis.manager.label.entity.entrance.{BindEngineLabel, LoadBalanceLabel} +import org.apache.linkis.orchestrator.computation.physical.CodeLogicalUnitExecTask import org.apache.linkis.orchestrator.ecm.conf.ECMPluginConf import org.apache.linkis.orchestrator.ecm.entity._ import org.apache.linkis.orchestrator.ecm.exception.ECMPluginErrorException @@ -153,8 +154,10 @@ class LoadBalanceLabelEngineConnManager extends ComputationEngineConnManager wit } } - override def getAvailableEngineConnExecutor(mark: Mark): EngineConnExecutor = { - + override def getAvailableEngineConnExecutor( + mark: Mark, + execTask: CodeLogicalUnitExecTask + ): EngineConnExecutor = { if (null != mark && getMarkCache().containsKey(mark)) { tryReuseEngineConnExecutor(mark) match { case Some(engineConnExecutor) => @@ -174,7 +177,7 @@ class LoadBalanceLabelEngineConnManager extends ComputationEngineConnManager wit reuseExclusionLabel.getValue ) } - val engineConnExecutor = askEngineConnExecutor(engineConnAskReq, mark) + val engineConnExecutor = askEngineConnExecutor(engineConnAskReq, mark, execTask) saveToMarkCache(mark, engineConnExecutor) logger.debug( s"mark ${mark.getMarkId()} Finished to getAvailableEngineConnExecutor by create" diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/org/apache/linkis/orchestrator/strategy/async/AsyncExecTaskRunnerImpl.scala 
b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/org/apache/linkis/orchestrator/strategy/async/AsyncExecTaskRunnerImpl.scala index d546889b67..459bf7f38b 100644 --- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/org/apache/linkis/orchestrator/strategy/async/AsyncExecTaskRunnerImpl.scala +++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/org/apache/linkis/orchestrator/strategy/async/AsyncExecTaskRunnerImpl.scala @@ -17,20 +17,16 @@ package org.apache.linkis.orchestrator.strategy.async -import org.apache.linkis.common.utils.Logging +import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.governance.common.entity.ExecutionNodeStatus import org.apache.linkis.orchestrator.conf.OrchestratorConfiguration import org.apache.linkis.orchestrator.core.ResultSet import org.apache.linkis.orchestrator.exception.OrchestratorErrorCodeSummary -import org.apache.linkis.orchestrator.execution.{ArrayResultSetTaskResponse, _} +import org.apache.linkis.orchestrator.execution._ import org.apache.linkis.orchestrator.execution.impl.{ DefaultFailedTaskResponse, DefaultResultSetTaskResponse } -import org.apache.linkis.orchestrator.listener.{ - OrchestratorListenerBusContext, - OrchestratorSyncListenerBus -} import org.apache.linkis.orchestrator.listener.execution.ExecTaskRunnerCompletedEvent import org.apache.linkis.orchestrator.plans.physical.ExecTask import org.apache.linkis.orchestrator.strategy.{ @@ -38,6 +34,7 @@ import org.apache.linkis.orchestrator.strategy.{ ResultSetExecTask, StatusInfoExecTask } +import org.apache.linkis.orchestrator.utils.OrchestratorLoggerUtils import scala.collection.mutable.ArrayBuffer @@ -66,25 +63,26 @@ class AsyncExecTaskRunnerImpl(override val task: ExecTask) override def isSucceed: Boolean = ExecutionNodeStatus.isScheduled(status) - override def run(): Unit = try { - logger.info(s"ExecTaskRunner Submit execTask(${task.getIDInfo}) to running") - val response = task.execute() - 
this.taskResponse = response - response match { - case async: AsyncTaskResponse => - transientStatus(ExecutionNodeStatus.Running) - case succeed: SucceedTaskResponse => - logger.info(s"Succeed to execute ExecTask(${task.getIDInfo})") - transientStatus(ExecutionNodeStatus.Succeed) - case failedTaskResponse: FailedTaskResponse => - logger.info(s"Failed to execute ExecTask(${task.getIDInfo})") - transientStatus(ExecutionNodeStatus.Failed) - case retry: RetryTaskResponse => - logger.warn(s"ExecTask(${task.getIDInfo}) need to retry") - transientStatus(ExecutionNodeStatus.WaitForRetry) - } - } catch { - case e: Throwable => + override def run(): Unit = { + Utils.tryCatch { + OrchestratorLoggerUtils.setJobIdMDC(task) + logger.info(s"ExecTaskRunner Submit execTask(${task.getIDInfo}) to running") + val response = task.execute() + this.taskResponse = response + response match { + case async: AsyncTaskResponse => + transientStatus(ExecutionNodeStatus.Running) + case succeed: SucceedTaskResponse => + logger.info(s"Succeed to execute ExecTask(${task.getIDInfo})") + transientStatus(ExecutionNodeStatus.Succeed) + case failedTaskResponse: FailedTaskResponse => + logger.info(s"Failed to execute ExecTask(${task.getIDInfo})") + transientStatus(ExecutionNodeStatus.Failed) + case retry: RetryTaskResponse => + logger.warn(s"ExecTask(${task.getIDInfo}) need to retry") + transientStatus(ExecutionNodeStatus.WaitForRetry) + } + } { case e: Throwable => logger.error(s"Failed to execute task ${task.getIDInfo}", e) this.taskResponse = new DefaultFailedTaskResponse( e.getMessage, @@ -92,6 +90,8 @@ class AsyncExecTaskRunnerImpl(override val task: ExecTask) e ) transientStatus(ExecutionNodeStatus.Failed) + } + OrchestratorLoggerUtils.removeJobIdMDC() } override def transientStatus(status: ExecutionNodeStatus): Unit = { diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/org/apache/linkis/orchestrator/utils/OrchestratorLoggerUtils.scala 
b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/org/apache/linkis/orchestrator/utils/OrchestratorLoggerUtils.scala new file mode 100644 index 0000000000..e249c675bb --- /dev/null +++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/org/apache/linkis/orchestrator/utils/OrchestratorLoggerUtils.scala @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.orchestrator.utils + +import org.apache.linkis.governance.common.utils.LoggerUtils +import org.apache.linkis.orchestrator.plans.physical.ExecTask + +object OrchestratorLoggerUtils { + + def setJobIdMDC(task: ExecTask): Unit = { + val startUpMap = + task.getTaskDesc.getOrigin.getASTOrchestration.getASTContext.getParams.getStartupParams + if (null != startUpMap) { + LoggerUtils.setJobIdMDC(startUpMap.getConfigurationMap()) + } + } + + def removeJobIdMDC(): Unit = { + LoggerUtils.removeJobIdMDC() + } + +} diff --git a/linkis-orchestrator/pom.xml b/linkis-orchestrator/pom.xml index afc94da056..f424c38285 100644 --- a/linkis-orchestrator/pom.xml +++ b/linkis-orchestrator/pom.xml @@ -21,6 +21,7 @@ org.apache.linkis linkis ${revision} + ../pom.xml linkis-orchestrator diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/data/StdoutDisplayData.java b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/conf/UdfTreeConf.java similarity index 73% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/data/StdoutDisplayData.java rename to linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/conf/UdfTreeConf.java index e332256417..ded1d7e434 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/display/data/StdoutDisplayData.java +++ b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/conf/UdfTreeConf.java @@ -15,16 +15,12 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.present.display.data; +package org.apache.linkis.basedatamanager.server.conf; -public class StdoutDisplayData implements DisplayData { - private String content; +import org.apache.linkis.common.conf.CommonVars; - public StdoutDisplayData(String content) { - this.content = content; - } +public class UdfTreeConf { - public String getContent() { - return content; - } + public static final CommonVars UDF_FUN_SYSTEM_CATEGORY = + CommonVars.apply("linkis.udf.fun.system.category", "user_name,sys,expire,share,bdp"); } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/SyncBackendJob.java b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/dao/UdfBaseInfoMapper.java similarity index 64% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/SyncBackendJob.java rename to linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/dao/UdfBaseInfoMapper.java index d18b81b91d..c2d585993b 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/job/SyncBackendJob.java +++ b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/dao/UdfBaseInfoMapper.java @@ -15,15 +15,15 @@ * limitations under the License. */ -package org.apache.linkis.cli.core.interactor.job; +package org.apache.linkis.basedatamanager.server.dao; -import org.apache.linkis.cli.common.entity.job.Job; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; +import org.apache.linkis.basedatamanager.server.domain.UdfBaseInfoEntity; + +import com.baomidou.mybatisplus.core.mapper.BaseMapper; /** - * Backend only supports sync-submission, i.e. 
submit and wait till job finish and get result in one - * call, then implement this interface. + * @description Database operation Mapper for the linkis_PS_UDF_tree table + * @createDate 2022-08-13 15:13:27 @Entity + * org.apache.linkis.basedatamanager.server.domain.LinkisPsUdfTree */ -public interface SyncBackendJob extends Job { - void submitAndGetResult() throws LinkisClientRuntimeException; -} +public interface UdfBaseInfoMapper extends BaseMapper {} diff --git a/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/domain/UdfBaseInfoEntity.java b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/domain/UdfBaseInfoEntity.java new file mode 100644 index 0000000000..0c8776ed26 --- /dev/null +++ b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/domain/UdfBaseInfoEntity.java @@ -0,0 +1,205 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.basedatamanager.server.domain; + +import java.io.Serializable; +import java.util.Date; +import java.util.Objects; + +import com.baomidou.mybatisplus.annotation.IdType; +import com.baomidou.mybatisplus.annotation.TableField; +import com.baomidou.mybatisplus.annotation.TableId; +import com.baomidou.mybatisplus.annotation.TableName; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; + +/** @TableName linkis_ps_udf_baseinfo */ +@TableName(value = "linkis_ps_udf_baseinfo") +@JsonIgnoreProperties(ignoreUnknown = true) +public class UdfBaseInfoEntity implements Serializable { + + @TableId(type = IdType.AUTO) + private Long id; + + private String createUser; + private String udfName; + private Integer udfType; + private Boolean isExpire; + private Boolean isShared; + private Long treeId; + private Date createTime; + private Date updateTime; + private String sys; + private String clusterName; + + @TableField(exist = false) + private static final long serialVersionUID = 1L; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getCreateUser() { + return createUser; + } + + public void setCreateUser(String createUser) { + this.createUser = createUser; + } + + public String getUdfName() { + return udfName; + } + + public void setUdfName(String udfName) { + this.udfName = udfName; + } + + public Integer getUdfType() { + return udfType; + } + + public void setUdfType(Integer udfType) { + this.udfType = udfType; + } + + public Boolean getExpire() { + return isExpire; + } + + public void setExpire(Boolean expire) { + isExpire = expire; + } + + public Boolean getShared() { + return isShared; + } + + public void setShared(Boolean shared) { + isShared = shared; + } + + public Long getTreeId() { + return treeId; + } + + public void setTreeId(Long treeId) { + this.treeId = treeId; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date 
createTime) { + this.createTime = createTime; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + public String getSys() { + return sys; + } + + public void setSys(String sys) { + this.sys = sys; + } + + public String getClusterName() { + return clusterName; + } + + public void setClusterName(String clusterName) { + this.clusterName = clusterName; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + UdfBaseInfoEntity that = (UdfBaseInfoEntity) o; + return Objects.equals(id, that.id) + && Objects.equals(createUser, that.createUser) + && Objects.equals(udfName, that.udfName) + && Objects.equals(udfType, that.udfType) + && Objects.equals(isExpire, that.isExpire) + && Objects.equals(isShared, that.isShared) + && Objects.equals(treeId, that.treeId) + && Objects.equals(createTime, that.createTime) + && Objects.equals(updateTime, that.updateTime) + && Objects.equals(sys, that.sys) + && Objects.equals(clusterName, that.clusterName); + } + + @Override + public int hashCode() { + return Objects.hash( + id, + createUser, + udfName, + udfType, + isExpire, + isShared, + treeId, + createTime, + updateTime, + sys, + clusterName); + } + + @Override + public String toString() { + return "UdfBaseInfoEntity{" + + "id=" + + id + + ", createUser='" + + createUser + + '\'' + + ", udfName='" + + udfName + + '\'' + + ", udfType=" + + udfType + + ", isExpire=" + + isExpire + + ", isShared=" + + isShared + + ", treeId=" + + treeId + + ", createTime=" + + createTime + + ", updateTime=" + + updateTime + + ", sys='" + + sys + + '\'' + + ", clusterName='" + + clusterName + + '\'' + + '}'; + } +} diff --git a/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/domain/UdfManagerEntity.java 
b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/domain/UdfManagerEntity.java index fa6db837e1..8c5abbeec1 100644 --- a/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/domain/UdfManagerEntity.java +++ b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/domain/UdfManagerEntity.java @@ -18,6 +18,7 @@ package org.apache.linkis.basedatamanager.server.domain; import java.io.Serializable; +import java.util.Date; import com.baomidou.mybatisplus.annotation.IdType; import com.baomidou.mybatisplus.annotation.TableField; @@ -36,6 +37,12 @@ public class UdfManagerEntity implements Serializable { /** */ private String userName; + /** */ + private Date createTime; + + /** */ + private Date updateTime; + @TableField(exist = false) private static final long serialVersionUID = 1L; @@ -59,6 +66,22 @@ public void setUserName(String userName) { this.userName = userName; } + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + @Override public boolean equals(Object that) { if (this == that) { diff --git a/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/domain/UdfTreeEntity.java b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/domain/UdfTreeEntity.java index 9d2a51d471..32615f095f 100644 --- a/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/domain/UdfTreeEntity.java +++ b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/domain/UdfTreeEntity.java @@ -19,6 +19,7 
@@ import java.io.Serializable; import java.util.Date; +import java.util.List; import com.baomidou.mybatisplus.annotation.IdType; import com.baomidou.mybatisplus.annotation.TableField; @@ -58,6 +59,9 @@ public class UdfTreeEntity implements Serializable { @TableField(exist = false) private static final long serialVersionUID = 1L; + @TableField(exist = false) + private List childrenList; + /** */ public Long getId() { return id; @@ -138,6 +142,14 @@ public void setCategory(String category) { this.category = category; } + public List getChildrenList() { + return childrenList; + } + + public void setChildrenList(List childrenList) { + this.childrenList = childrenList; + } + @Override public boolean equals(Object that) { if (this == that) { diff --git a/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/restful/ErrorCodeRestfulApi.java b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/restful/ErrorCodeRestfulApi.java index 3c215a831b..1a24b36fd4 100644 --- a/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/restful/ErrorCodeRestfulApi.java +++ b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/restful/ErrorCodeRestfulApi.java @@ -89,8 +89,12 @@ public Message add(HttpServletRequest request, @RequestBody ErrorCodeEntity erro @ApiOperation(value = "remove", notes = "Remove an Error Code by id", httpMethod = "DELETE") @RequestMapping(path = "/{id}", method = RequestMethod.DELETE) public Message remove(HttpServletRequest request, @PathVariable("id") Long id) { - ModuleUserUtils.getOperationUser( - request, "Remove a Datasource Code Record,id:" + id.toString()); + String username = + ModuleUserUtils.getOperationUser( + request, "Try to remove error code record with id:" + id.toString()); + if (!Configuration.isAdmin(username)) { + return 
Message.error("User '" + username + "' is not admin user[非管理员用户]"); + } boolean result = errorCodeService.removeById(id); return Message.ok("").data("result", result); } diff --git a/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/restful/GatewayAuthTokenRestfulApi.java b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/restful/GatewayAuthTokenRestfulApi.java index 2b86e5bb43..7d5668c074 100644 --- a/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/restful/GatewayAuthTokenRestfulApi.java +++ b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/restful/GatewayAuthTokenRestfulApi.java @@ -127,7 +127,12 @@ public Message update(HttpServletRequest request, @RequestBody GatewayAuthTokenE httpMethod = "DELETE") @RequestMapping(path = "/{id}", method = RequestMethod.DELETE) public Message remove(HttpServletRequest request, @PathVariable("id") Long id) { - ModuleUserUtils.getOperationUser(request, "Remove a Gateway Auth Token Record,id:" + id); + String username = + ModuleUserUtils.getOperationUser( + request, "Try to remove gateway auto token record with id:" + id); + if (!Configuration.isAdmin(username)) { + return Message.error("User '" + username + "' is not admin user[非管理员用户]"); + } boolean result = gatewayAuthTokenService.removeById(id); return Message.ok("").data("result", result); } diff --git a/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/restful/RmExternalResourceProviderRestfulApi.java b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/restful/RmExternalResourceProviderRestfulApi.java index 5575eca20b..8b06b2e63f 100644 --- 
a/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/restful/RmExternalResourceProviderRestfulApi.java +++ b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/restful/RmExternalResourceProviderRestfulApi.java @@ -112,8 +112,13 @@ public Message add( httpMethod = "DELETE") @RequestMapping(path = "/{id}", method = RequestMethod.DELETE) public Message remove(HttpServletRequest request, @PathVariable("id") Long id) { - ModuleUserUtils.getOperationUser( - request, "Remove a Resource manager External Resource Provider Record,id:" + id.toString()); + String username = + ModuleUserUtils.getOperationUser( + request, + "Try to remove resource external resource provider record with id:" + id.toString()); + if (!Configuration.isAdmin(username)) { + return Message.error("User '" + username + "' is not admin user[非管理员用户]"); + } boolean result = rmExternalResourceProviderService.removeById(id); return Message.ok("").data("result", result); } diff --git a/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/restful/UdfManagerRestfulApi.java b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/restful/UdfManagerRestfulApi.java index f6e684f837..588b0b6a26 100644 --- a/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/restful/UdfManagerRestfulApi.java +++ b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/restful/UdfManagerRestfulApi.java @@ -32,6 +32,8 @@ import javax.servlet.http.HttpServletRequest; +import java.util.Date; + import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; import com.github.pagehelper.PageInfo; import io.swagger.annotations.Api; @@ -89,10 +91,12 @@ public Message add(HttpServletRequest request, @RequestBody 
UdfManagerEntity udf new QueryWrapper<>(udfManagerEntity).eq("user_name", udfManagerEntity.getUserName()); UdfManagerEntity udfManager = udfManagerService.getOne(queryWrapper); if (udfManager == null) { + udfManagerEntity.setCreateTime(new Date()); + udfManagerEntity.setUpdateTime(new Date()); boolean result = udfManagerService.save(udfManagerEntity); return Message.ok("").data("result", result); } else { - return Message.error("The username already exists,Please add again!"); + return Message.error("The " + udfManager.getUserName() + " already exists,Please add again!"); } } @@ -103,7 +107,12 @@ public Message add(HttpServletRequest request, @RequestBody UdfManagerEntity udf httpMethod = "DELETE") @RequestMapping(path = "/{id}", method = RequestMethod.DELETE) public Message remove(HttpServletRequest request, @PathVariable("id") Long id) { - ModuleUserUtils.getOperationUser(request, "Remove a UDF Manager Record,id:" + id.toString()); + String username = + ModuleUserUtils.getOperationUser( + request, "Remove a UDF Manager Record,id:" + id.toString()); + if (!Configuration.isAdmin(username)) { + return Message.error("User '" + username + "' is not admin user[非管理员用户]"); + } boolean result = udfManagerService.removeById(id); return Message.ok("").data("result", result); } @@ -121,7 +130,16 @@ public Message update( if (!Configuration.isAdmin(username)) { return Message.error("User '" + username + "' is not admin user[非管理员用户]"); } - boolean result = udfManagerService.updateById(udfManagerEntity); - return Message.ok("").data("result", result); + QueryWrapper queryWrapper = new QueryWrapper(); + queryWrapper.eq("user_name", udfManagerEntity.getUserName()); + UdfManagerEntity udfManager = udfManagerService.getOne(queryWrapper); + if (udfManager == null) { + udfManagerEntity.setUpdateTime(new Date()); + boolean result = udfManagerService.updateById(udfManagerEntity); + return Message.ok("").data("result", result); + } else { + return Message.error( + "The " + 
udfManager.getUserName() + " already exists,Please update again!"); + } } } diff --git a/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/restful/UdfTreeRestfulApi.java b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/restful/UdfTreeRestfulApi.java index e5c9cbc442..7b8f434236 100644 --- a/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/restful/UdfTreeRestfulApi.java +++ b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/restful/UdfTreeRestfulApi.java @@ -17,12 +17,18 @@ package org.apache.linkis.basedatamanager.server.restful; +import org.apache.linkis.basedatamanager.server.domain.UdfBaseInfoEntity; import org.apache.linkis.basedatamanager.server.domain.UdfTreeEntity; +import org.apache.linkis.basedatamanager.server.service.UdfBaseInfoService; import org.apache.linkis.basedatamanager.server.service.UdfTreeService; +import org.apache.linkis.basedatamanager.server.utils.UdfTreeUtils; import org.apache.linkis.common.conf.Configuration; import org.apache.linkis.server.Message; import org.apache.linkis.server.utils.ModuleUserUtils; +import org.apache.commons.collections.CollectionUtils; +import org.apache.commons.lang3.StringUtils; + import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestBody; @@ -32,8 +38,10 @@ import javax.servlet.http.HttpServletRequest; +import java.util.ArrayList; import java.util.List; +import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper; import com.github.pagehelper.PageInfo; import io.swagger.annotations.Api; import io.swagger.annotations.ApiImplicitParam; @@ -47,6 +55,8 @@ public class UdfTreeRestfulApi { @Autowired UdfTreeService udfTreeService; + @Autowired UdfBaseInfoService 
udfBaseinfoService; + @ApiImplicitParams({ @ApiImplicitParam(paramType = "query", dataType = "string", name = "searchName"), @ApiImplicitParam(paramType = "query", dataType = "int", name = "currentPage"), @@ -62,12 +72,26 @@ public Message list( return Message.ok("").data("list", pageList); } + @ApiImplicitParams({ + @ApiImplicitParam(paramType = "query", dataType = "string", name = "searchName"), + @ApiImplicitParam(paramType = "query", dataType = "string", name = "category") + }) @ApiOperation(value = "all", notes = "Query all data of UDF Tree", httpMethod = "GET") @RequestMapping(path = "/all", method = RequestMethod.GET) - public Message all(HttpServletRequest request, String searchName) { + public Message all(HttpServletRequest request, String searchName, String category) { ModuleUserUtils.getOperationUser( request, "Query all data of UDF Tree,search name:" + searchName); - List udfTreeEntityList = udfTreeService.list(); + List udfTreeEntityList = new ArrayList<>(); + if (StringUtils.isNotBlank(searchName) && StringUtils.isNotBlank(category)) { + UdfTreeEntity entity = new UdfTreeEntity(); + entity.setCategory(category); + entity.setUserName(searchName); + QueryWrapper queryWrapper = + new QueryWrapper<>(entity) + .eq("user_name", entity.getUserName()) + .eq("category", entity.getCategory()); + udfTreeEntityList = new UdfTreeUtils(udfTreeService.list(queryWrapper)).buildTree(); + } return Message.ok("").data("list", udfTreeEntityList); } @@ -100,9 +124,27 @@ public Message add(HttpServletRequest request, @RequestBody UdfTreeEntity udfTre @ApiOperation(value = "remove", notes = "Remove a UDF Tree Record by id", httpMethod = "DELETE") @RequestMapping(path = "/{id}", method = RequestMethod.DELETE) public Message remove(HttpServletRequest request, @PathVariable("id") Long id) { - ModuleUserUtils.getOperationUser(request, "Remove a UDF Tree Record,id:" + id.toString()); - boolean result = udfTreeService.removeById(id); - return Message.ok("").data("result", 
result); + String username = + ModuleUserUtils.getOperationUser(request, "Remove a UDF Tree Record,id:" + id.toString()); + if (!Configuration.isAdmin(username)) { + return Message.error("User '" + username + "' is not admin user[非管理员用户]"); + } + UdfTreeEntity entity = udfTreeService.getById(id); + if (null != entity && entity.getParent() == -1) { + return Message.error("The root directory is forbidden to delete[\"根目录禁止删除\"]"); + } + QueryWrapper queryWrapper = + new QueryWrapper<>(new UdfTreeEntity()).eq("parent", id); + List folderList = udfTreeService.list(queryWrapper); + QueryWrapper udfQueryWrapper = + new QueryWrapper<>(new UdfBaseInfoEntity()).eq("tree_id", id); + List functoinList = udfBaseinfoService.list(udfQueryWrapper); + if (CollectionUtils.isEmpty(folderList) && CollectionUtils.isEmpty(functoinList)) { + boolean result = udfTreeService.removeById(id); + return Message.ok("").data("result", result); + } else { + return Message.error("Please delete the subdirectory first[请先删除子目录]"); + } } @ApiImplicitParams({ diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/ExecutionStatusEnum.java b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/service/UdfBaseInfoService.java similarity index 67% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/ExecutionStatusEnum.java rename to linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/service/UdfBaseInfoService.java index 0c4375b3d1..032c818b43 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/interactor/result/ExecutionStatusEnum.java +++ 
b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/service/UdfBaseInfoService.java @@ -15,20 +15,14 @@ * limitations under the License. */ -package org.apache.linkis.cli.core.interactor.result; +package org.apache.linkis.basedatamanager.server.service; -import org.apache.linkis.cli.common.entity.result.ExecutionStatus; +import org.apache.linkis.basedatamanager.server.domain.UdfBaseInfoEntity; -public enum ExecutionStatusEnum implements ExecutionStatus { - UNDEFINED("Inited", 1), - SUCCEED("Succeed", 2), - FAILED("Failed", 3); +import com.baomidou.mybatisplus.extension.service.IService; - private String name; - private int id; - - ExecutionStatusEnum(String name, int id) { - this.name = name; - this.id = id; - } -} +/** + * @description Database operation Service for the [linkis_ps_udf_baseinfo] table + * @createDate 2022-08-13 15:13:27 + */ +public interface UdfBaseInfoService extends IService {} diff --git a/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/service/impl/UdfBaseInfoServicelmpl.java b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/service/impl/UdfBaseInfoServicelmpl.java new file mode 100644 index 0000000000..c3c62854a1 --- /dev/null +++ b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/service/impl/UdfBaseInfoServicelmpl.java @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.basedatamanager.server.service.impl; + +import org.apache.linkis.basedatamanager.server.dao.UdfBaseInfoMapper; +import org.apache.linkis.basedatamanager.server.domain.UdfBaseInfoEntity; +import org.apache.linkis.basedatamanager.server.service.UdfBaseInfoService; + +import org.springframework.stereotype.Service; + +import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl; + +@Service +public class UdfBaseInfoServicelmpl extends ServiceImpl + implements UdfBaseInfoService {} diff --git a/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/utils/UdfTreeUtils.java b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/utils/UdfTreeUtils.java new file mode 100644 index 0000000000..f7f2b19fca --- /dev/null +++ b/linkis-public-enhancements/linkis-basedata-manager/src/main/java/org/apache/linkis/basedatamanager/server/utils/UdfTreeUtils.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.basedatamanager.server.utils; + +import org.apache.linkis.basedatamanager.server.domain.UdfTreeEntity; + +import java.util.ArrayList; +import java.util.List; + +public class UdfTreeUtils { + + /** Build tree structure */ + public List udfTreeList = new ArrayList<>(); + + /** Construction method */ + public UdfTreeUtils(List udfTreeList) { + this.udfTreeList = udfTreeList; + } + + /** + * Obtain all root nodes (top-level nodes) that need to be built + * + * @return All Root Node List Collection + */ + public List getRootNode() { + // Save all root nodes (data for all root nodes) + List rootudfTreeList = new ArrayList<>(); + // UdfTreeEntity: Each piece of data (node) found in the query + for (UdfTreeEntity UdfTreeEntity : udfTreeList) { + // Determine whether the current node is a root node. Note here that if the parentId type is + // String, the equals() method should be used to determine. 
+ if (-1 == UdfTreeEntity.getParent()) { + rootudfTreeList.add(UdfTreeEntity); + } + } + return rootudfTreeList; + } + + /** + * Build a tree structure according to each top-level node (root node) + * + * @return Build the entire tree + */ + public List buildTree() { + // UdfTreeEntities: Saves the complete tree structure constructed by a top-level node + List UdfTreeEntitys = new ArrayList(); + // GetRootNode(): Get all root nodes + for (UdfTreeEntity treeRootNode : getRootNode()) { + // Build subtrees from top-level nodes + treeRootNode = buildChildTree(treeRootNode); + // Complete the tree structure constructed by a top-level node and add it in + UdfTreeEntitys.add(treeRootNode); + } + return UdfTreeEntitys; + } + + /** + * Recursion ----- construct sub tree structure + * + * @param udfTreeEntity Root node (top-level node) + * @return Whole tree + */ + public UdfTreeEntity buildChildTree(UdfTreeEntity udfTreeEntity) { + List childTree = new ArrayList(); + // udfTreeList:All node sets (all data) + for (UdfTreeEntity UdfTreeEntity : udfTreeList) { + // Determine whether the parent node ID of the current node is equal to the ID of the root + // node, that is, if the current node is a child node under it + if (UdfTreeEntity.getParent().equals(udfTreeEntity.getId())) { + // Recursively judge the current node's situation and call its own method + childTree.add(buildChildTree(UdfTreeEntity)); + } + } + // Recursively judge the current node's situation and call its own method + udfTreeEntity.setChildrenList(childTree); + return udfTreeEntity; + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/pom.xml b/linkis-public-enhancements/linkis-basedata-manager/src/main/resources/mapper/common/UdfBaseInfoMapper.xml similarity index 64% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/pom.xml rename to 
linkis-public-enhancements/linkis-basedata-manager/src/main/resources/mapper/common/UdfBaseInfoMapper.xml index 4cf0817130..30069a71f5 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/pom.xml +++ b/linkis-public-enhancements/linkis-basedata-manager/src/main/resources/mapper/common/UdfBaseInfoMapper.xml @@ -6,24 +6,19 @@ ~ The ASF licenses this file to You under the Apache License, Version 2.0 ~ (the "License"); you may not use this file except in compliance with ~ the License. You may obtain a copy of the License at - ~ + ~ ~ http://www.apache.org/licenses/LICENSE-2.0 - ~ + ~ ~ Unless required by applicable law or agreed to in writing, software ~ distributed under the License is distributed on an "AS IS" BASIS, ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ~ See the License for the specific language governing permissions and ~ limitations under the License. --> - - 4.0.0 + + - - org.apache.linkis - linkis-cli - ${revision} - - linkis-cli-common - jar - + diff --git a/linkis-public-enhancements/linkis-basedata-manager/src/test/resources/create.sql b/linkis-public-enhancements/linkis-basedata-manager/src/test/resources/create.sql index 88c4ab9a2a..74783c2dc2 100644 --- a/linkis-public-enhancements/linkis-basedata-manager/src/test/resources/create.sql +++ b/linkis-public-enhancements/linkis-basedata-manager/src/test/resources/create.sql @@ -128,12 +128,13 @@ CREATE TABLE `linkis_cg_rm_external_resource_provider` DROP TABLE IF EXISTS `linkis_ps_udf_manager`; -CREATE TABLE `linkis_ps_udf_manager` -( - `id` bigint(20) NOT NULL AUTO_INCREMENT, - `user_name` varchar(20) DEFAULT NULL, - PRIMARY KEY (`id`) -); +CREATE TABLE `linkis_ps_udf_manager` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `user_name` varchar(20) DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; DROP TABLE IF 
EXISTS `linkis_ps_udf_tree`; CREATE TABLE `linkis_ps_udf_tree` @@ -231,4 +232,20 @@ CREATE TABLE `linkis_cg_engine_conn_plugin_bml_resources` `create_time` datetime NOT NULL COMMENT 'created time', `last_update_time` datetime NOT NULL COMMENT 'updated time', PRIMARY KEY (`id`) -); \ No newline at end of file +); + +DROP TABLE IF EXISTS `linkis_ps_udf_baseinfo`; +CREATE TABLE `linkis_ps_udf_baseinfo` ( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `create_user` varchar(50) NOT NULL, + `udf_name` varchar(255) NOT NULL, + `udf_type` int(11) DEFAULT '0', + `tree_id` bigint(20) NOT NULL, + `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + `sys` varchar(255) NOT NULL DEFAULT 'ide' COMMENT 'source system', + `cluster_name` varchar(255) NOT NULL, + `is_expire` bit(1) DEFAULT NULL, + `is_shared` bit(1) DEFAULT NULL, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-basedata-manager/src/test/resources/data.sql b/linkis-public-enhancements/linkis-basedata-manager/src/test/resources/data.sql index 680c530762..ed3b9f57a5 100644 --- a/linkis-public-enhancements/linkis-basedata-manager/src/test/resources/data.sql +++ b/linkis-public-enhancements/linkis-basedata-manager/src/test/resources/data.sql @@ -214,3 +214,10 @@ INSERT INTO `linkis_ps_configuration_key_engine_relation` (`id`, `config_key_id` INSERT INTO `linkis_ps_configuration_key_engine_relation` (`id`, `config_key_id`, `engine_type_label_id`) VALUES (51, 48, 11); INSERT INTO `linkis_ps_configuration_key_engine_relation` (`id`, `config_key_id`, `engine_type_label_id`) VALUES (52, 49, 11); INSERT INTO `linkis_ps_configuration_key_engine_relation` (`id`, `config_key_id`, `engine_type_label_id`) VALUES (53, 50, 11); + +DELETE FROM linkis_ps_udf_baseinfo; +INSERT INTO linkis_ps_udf_baseinfo 
(create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name,is_expire,is_shared) VALUES + ('hadoop','pyUdfTest',1,14,'2022-09-08 11:43:20','2022-09-08 11:43:20','IDE','all',NULL,NULL), + ('hadoop','jarUdf',0,14,'2022-09-08 14:53:56','2022-09-08 14:53:56','IDE','all',NULL,NULL), + ('hadoop','test',3,13,'2022-09-08 14:54:30','2022-09-08 14:54:30','IDE','all',NULL,NULL), + ('hadoop','scalaUdf1',4,13,'2022-09-08 14:55:57','2022-09-08 14:55:57','IDE','all',NULL,NULL); diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/conf/AcrossClusterConfiguration.scala b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/conf/AcrossClusterConfiguration.scala new file mode 100644 index 0000000000..f6cd36f69c --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/conf/AcrossClusterConfiguration.scala @@ -0,0 +1,27 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.configuration.conf + +import org.apache.linkis.common.conf.CommonVars + +object AcrossClusterConfiguration { + + val ACROSS_CLUSTER_QUEUE_SUFFIX = + CommonVars.apply("linkis.configuration.across.cluster.queue.suffix", "_bdap2bdp").getValue + +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/AcrossClusterRuleMapper.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/AcrossClusterRuleMapper.java new file mode 100644 index 0000000000..dc10d5b02d --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/AcrossClusterRuleMapper.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.configuration.dao; + +import org.apache.linkis.configuration.entity.AcrossClusterRule; + +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +public interface AcrossClusterRuleMapper { + + AcrossClusterRule getAcrossClusterRule(@Param("id") Long id); + + void deleteAcrossClusterRule(@Param("creator") String creator, @Param("user") String user); + + void updateAcrossClusterRule(@Param("acrossClusterRule") AcrossClusterRule acrossClusterRule); + + void insertAcrossClusterRule(@Param("acrossClusterRule") AcrossClusterRule acrossClusterRule); + + List queryAcrossClusterRuleList( + @Param("user") String user, + @Param("creator") String creator, + @Param("clusterName") String clusterName); + + void validAcrossClusterRule(@Param("acrossClusterRule") AcrossClusterRule acrossClusterRule); +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisJobData.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigKeyLimitForUserMapper.java similarity index 50% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisJobData.java rename to linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigKeyLimitForUserMapper.java index 3d24570bf2..0993b2cbed 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/src/main/java/org/apache/linkis/cli/application/interactor/job/data/LinkisJobData.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigKeyLimitForUserMapper.java @@ -15,26 +15,26 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.application.interactor.job.data; +package org.apache.linkis.configuration.dao; -import org.apache.linkis.cli.application.operator.ujes.LinkisOperResultAdapter; -import org.apache.linkis.cli.common.entity.job.JobData; +import org.apache.linkis.configuration.entity.ConfigKeyLimitForUser; +import org.apache.linkis.configuration.entity.ConfigKeyLimitVo; -public interface LinkisJobData extends JobData, Cloneable { +import org.apache.ibatis.annotations.Param; - String getExecID(); +import java.util.List; - float getJobProgress(); +/** for table linkis_ps_configuration_key_limit_for_user @Description */ +public interface ConfigKeyLimitForUserMapper { - Integer getErrCode(); + int batchInsertList(List list); - String getErrDesc(); + int updateByPrimaryKey(ConfigKeyLimitForUser configKeyLimitForUser); - boolean isSuccess(); + int batchInsertOrUpdateList(List list); - void setSuccess(boolean success); + List selectByLabelAndKeyIds( + @Param("label") String label, @Param("keyIdList") List keyIdList); - void updateByOperResult(LinkisOperResultAdapter adapter); - - LinkisJobData clone() throws CloneNotSupportedException; + ConfigKeyLimitVo selectByLabelAndKeyId(@Param("label") String label, @Param("keyId") Long keyId); } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigMapper.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigMapper.java index 6b6b15a65c..ee5506d9eb 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigMapper.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigMapper.java @@ -17,10 +17,7 @@ package org.apache.linkis.configuration.dao; -import org.apache.linkis.configuration.entity.CategoryLabel; -import org.apache.linkis.configuration.entity.ConfigKey; -import 
org.apache.linkis.configuration.entity.ConfigKeyValue; -import org.apache.linkis.configuration.entity.ConfigValue; +import org.apache.linkis.configuration.entity.*; import org.apache.ibatis.annotations.Param; @@ -28,19 +25,14 @@ public interface ConfigMapper { - List getConfigByEngineUserCreator( - @Param("engineType") String engineType, - @Param("creator") String creator, - @Param("userName") String userName); - List getConfigKeyByLabelIds(@Param("ids") List ids); List getConfigKeyValueByLabelId(@Param("labelId") Integer labelId); - Long selectAppIDByAppName(@Param("name") String appName); - void insertValue(ConfigValue configValue); + int batchInsertOrUpdateValueList(List list); + ConfigValue getConfigValueById(@Param("id") Long id); ConfigValue getConfigValueByKeyAndLabel(ConfigValue configValue); @@ -57,9 +49,14 @@ List getConfigByEngineUserCreator( List selectKeyByKeyName(@Param("keyName") String keyName); - List listKeyByStringValue(@Param("stringValue") String stringValue); + List selectKeyByEngineType(@Param("engineType") String engineType); + + List selectKeyByEngineTypeAndKeyList( + @Param("engineType") String engineType, @Param("keyList") List keyList); - void insertCreator(String creator); + List selectKeyByKeyIdList(@Param("keyIdList") List keyList); + + List listKeyByStringValue(@Param("stringValue") String stringValue); List getCategory(); @@ -74,4 +71,21 @@ List getConfigByEngineUserCreator( void insertKey(ConfigKey key); List getConfigEnKeyValueByLabelId(@Param("labelId") Integer labelId); + + void deleteConfigKey(@Param("id") Integer id); + + List getConfigBykey(@Param("engineType") String engineType, @Param("key") String key); + + List getConfigEnBykey( + @Param("engineType") String engineType, @Param("key") String key); + + List getUserConfigValue( + @Param("key") String key, + @Param("user") String user, + @Param("creator") String creator, + @Param("engineType") String engineType); + + void insertKeyByBase(ConfigKey configKey); + + void 
updateConfigKey(ConfigKey configKey); } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/LabelMapper.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/LabelMapper.java index d199134b4b..1a513e3352 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/LabelMapper.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/LabelMapper.java @@ -28,8 +28,13 @@ public interface LabelMapper { ConfigLabel getLabelByKeyValue( @Param("labelKey") String labelKey, @Param("stringValue") String stringValue); + // label key:combined_userCreator_engineType + List selectUserCreatorEngineTypeLabelList(@Param("itemList") List itemList); + void insertLabel(ConfigLabel label); + void batchInsertLabel(@Param("labelList") List labelList); + void deleteLabel(@Param("ids") List ids); ConfigLabel getLabelById(@Param("id") Integer id); diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/TemplateConfigKeyMapper.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/TemplateConfigKeyMapper.java new file mode 100644 index 0000000000..6acdd6b825 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/TemplateConfigKeyMapper.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.configuration.dao; + +import org.apache.linkis.configuration.entity.TemplateConfigKey; +import org.apache.linkis.configuration.entity.TemplateConfigKeyVO; + +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +/** The dao interface class of the linkis_ps_configuration_template_config_key table @Description */ +public interface TemplateConfigKeyMapper { + + int batchInsertList(List list); + + List selectListByTemplateUuid(@Param("templateUuid") String templateUuid); + + int deleteByTemplateUuidAndKeyIdList( + @Param("templateUuid") String templateUuid, @Param("keyIdList") List KeyIdList); + + int batchInsertOrUpdateList(List list); + + List selectListByTemplateUuidList( + @Param("templateUuidList") List templateUuidList); + + List selectInfoListByTemplateUuid( + @Param("templateUuid") String templateUuid); +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/AcrossClusterRule.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/AcrossClusterRule.java new file mode 100644 index 0000000000..3ea0c22d47 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/AcrossClusterRule.java @@ -0,0 +1,149 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.configuration.entity; + +import java.util.Date; + +public class AcrossClusterRule { + + private Long id; + private String clusterName; + private String creator; + private String user; + private Date createTime; + private String createBy; + private Date updateTime; + private String updateBy; + private String rules; + private String isValid; + + public AcrossClusterRule() {} + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getClusterName() { + return clusterName; + } + + public void setClusterName(String clusterName) { + this.clusterName = clusterName; + } + + public String getCreator() { + return creator; + } + + public void setCreator(String creator) { + this.creator = creator; + } + + public String getUser() { + return user; + } + + public void setUser(String user) { + this.user = user; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public String getCreateBy() { + return createBy; + } + + public void setCreateBy(String createBy) { + this.createBy = createBy; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + public String getUpdateBy() { + return updateBy; + } + + public void 
setUpdateBy(String updateBy) { + this.updateBy = updateBy; + } + + public String getRules() { + return rules; + } + + public void setRules(String rules) { + this.rules = rules; + } + + public String getIsValid() { + return isValid; + } + + public void setIsValid(String isValid) { + this.isValid = isValid; + } + + @Override + public String toString() { + return "AcrossClusterRule{" + + "id=" + + id + + ", clusterName='" + + clusterName + + '\'' + + ", creator='" + + creator + + '\'' + + ", user='" + + user + + '\'' + + ", createTime=" + + createTime + + ", createBy='" + + createBy + + '\'' + + ", updateTime=" + + updateTime + + ", updateBy='" + + updateBy + + '\'' + + ", rules='" + + rules + + '\'' + + ", isValid='" + + isValid + + '\'' + + '}'; + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKey.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKey.java index 1e26252a7c..e1ae25d425 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKey.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKey.java @@ -17,6 +17,9 @@ package org.apache.linkis.configuration.entity; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; + +@JsonIgnoreProperties(ignoreUnknown = true) public class ConfigKey { private Long id; @@ -43,6 +46,20 @@ public class ConfigKey { private String treeName; + /* + 0 none + 1 with mix + 2 with max + 3 min and max both + */ + private Integer boundaryType; + + private String enName; + + private String enDescription; + + private String enTreeName; + public String getEngineType() { return engineType; } @@ -138,4 +155,76 @@ public Integer getLevel() { public void setLevel(Integer level) { this.level = level; } + + public Integer getBoundaryType() { + return boundaryType; + } + + 
public void setBoundaryType(Integer boundaryType) { + this.boundaryType = boundaryType; + } + + public String getEnName() { + return enName; + } + + public void setEnName(String enName) { + this.enName = enName; + } + + public String getEnDescription() { + return enDescription; + } + + public void setEnDescription(String enDescription) { + this.enDescription = enDescription; + } + + public String getEnTreeName() { + return enTreeName; + } + + public void setEnTreeName(String enTreeName) { + this.enTreeName = enTreeName; + } + + @Override + public String toString() { + return "ConfigKey{" + + "id=" + + id + + ", key='" + + key + + '\'' + + ", description='" + + description + + '\'' + + ", name='" + + name + + '\'' + + ", engineType='" + + engineType + + '\'' + + ", defaultValue='" + + defaultValue + + '\'' + + ", validateType='" + + validateType + + '\'' + + ", validateRange='" + + validateRange + + '\'' + + ", isAdvanced=" + + isAdvanced + + ", isHidden=" + + isHidden + + ", level=" + + level + + ", treeName='" + + treeName + + '\'' + + ", boundaryType=" + + boundaryType + + '}'; + } } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyLimitForUser.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyLimitForUser.java new file mode 100644 index 0000000000..a626f32255 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyLimitForUser.java @@ -0,0 +1,200 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.configuration.entity; + +import java.util.Date; + +/** for table linkis_ps_configuration_key_limit_for_user @Description */ +public class ConfigKeyLimitForUser { + + /** Table field: id Field type: bigint(19) */ + private Long id; + + /** Username table field: user_name field type: varchar(50) */ + private String userName; + + /** + * combined label combined_userCreator_engineType such as hadoop-IDE, spark-2.4.3 table field: + * combined_label_value field type: varchar(200) + */ + private String combinedLabelValue; + + /** id of linkis_ps_configuration_config_key table field: key_id field type: bigint(19) */ + private Long keyId; + + /** Configuration value table field: config_value field type: varchar(200) */ + private String configValue; + + /** Upper limit table field: max_value field type: varchar(50) */ + private String maxValue; + + /** Lower limit value (reserved) table field: min_value field type: varchar(50) */ + private String minValue; + + /** + * uuid The template id table field of the third-party record: latest_update_template_uuid Field + * type: varchar(34) + */ + private String latestUpdateTemplateUuid; + + /** Is it valid Reserved Y/N table field: is_valid field type: varchar(2) */ + private String isValid; + + /** Creator table field: create_by field type: varchar(50) */ + private String createBy; + + /** + * create time table field: create_time field type: timestamp(19) default value: CURRENT_TIMESTAMP + */ + private Date createTime; + + /** Updater table field: update_by field type: varchar(50) */ + 
private String updateBy; + + /** + * update time table field: update_time field type: timestamp(19) default value: CURRENT_TIMESTAMP + */ + private Date updateTime; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getUserName() { + return userName; + } + + public void setUserName(String userName) { + this.userName = userName; + } + + public String getCombinedLabelValue() { + return combinedLabelValue; + } + + public void setCombinedLabelValue(String combinedLabelValue) { + this.combinedLabelValue = combinedLabelValue; + } + + public Long getKeyId() { + return keyId; + } + + public void setKeyId(Long keyId) { + this.keyId = keyId; + } + + public String getConfigValue() { + return configValue; + } + + public void setConfigValue(String configValue) { + this.configValue = configValue; + } + + public String getMaxValue() { + return maxValue; + } + + public void setMaxValue(String maxValue) { + this.maxValue = maxValue; + } + + public String getMinValue() { + return minValue; + } + + public void setMinValue(String minValue) { + this.minValue = minValue; + } + + public String getLatestUpdateTemplateUuid() { + return latestUpdateTemplateUuid; + } + + public void setLatestUpdateTemplateUuid(String latestUpdateTemplateUuid) { + this.latestUpdateTemplateUuid = latestUpdateTemplateUuid; + } + + public String getIsValid() { + return isValid; + } + + public void setIsValid(String isValid) { + this.isValid = isValid; + } + + public String getCreateBy() { + return createBy; + } + + public void setCreateBy(String createBy) { + this.createBy = createBy; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public String getUpdateBy() { + return updateBy; + } + + public void setUpdateBy(String updateBy) { + this.updateBy = updateBy; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void 
setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()); + sb.append(" ["); + sb.append("Hash = ").append(hashCode()); + sb.append(", id=").append(id); + sb.append(", userName=").append(userName); + sb.append(", combinedLabelValue=").append(combinedLabelValue); + sb.append(", keyId=").append(keyId); + sb.append(", configValue=").append(configValue); + sb.append(", maxValue=").append(maxValue); + sb.append(", minValue=").append(minValue); + sb.append(", latestUpdateTemplateUuid=").append(latestUpdateTemplateUuid); + sb.append(", isValid=").append(isValid); + sb.append(", createBy=").append(createBy); + sb.append(", createTime=").append(createTime); + sb.append(", updateBy=").append(updateBy); + sb.append(", updateTime=").append(updateTime); + sb.append(']'); + return sb.toString(); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/PresentWayImpl.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyLimitVo.java similarity index 52% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/PresentWayImpl.java rename to linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyLimitVo.java index 6fbe4e1ec0..c612168713 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/PresentWayImpl.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyLimitVo.java @@ -15,37 +15,50 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.core.present; +package org.apache.linkis.configuration.entity; -import org.apache.linkis.cli.common.entity.present.PresentWay; +public class ConfigKeyLimitVo { -public class PresentWayImpl implements PresentWay { + /** id : bigint(19) */ + private Long keyId; - private PresentMode mode; - private String path; - private boolean displayMetaAndLogo = true; + private String key; - public String getPath() { - return path; + /** config_value varchar(200) */ + private String configValue; + + /** max_value varchar(50) */ + private String maxValue; + + public Long getKeyId() { + return keyId; + } + + public void setKeyId(Long keyId) { + this.keyId = keyId; + } + + public String getConfigValue() { + return configValue; } - public void setPath(String path) { - this.path = path; + public void setConfigValue(String configValue) { + this.configValue = configValue; } - public boolean isDisplayMetaAndLogo() { - return displayMetaAndLogo; + public String getMaxValue() { + return maxValue; } - public void setDisplayMetaAndLogo(boolean displayMetaAndLogo) { - this.displayMetaAndLogo = displayMetaAndLogo; + public void setMaxValue(String maxValue) { + this.maxValue = maxValue; } - public PresentMode getMode() { - return mode; + public String getKey() { + return key; } - public void setMode(PresentMode mode) { - this.mode = mode; + public void setKey(String key) { + this.key = key; } } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyValue.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyValue.java index 143566218c..19266bc691 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyValue.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyValue.java @@ -17,6 +17,8 @@ package 
org.apache.linkis.configuration.entity; +import java.util.Map; + public class ConfigKeyValue { private Long id; @@ -53,6 +55,16 @@ public class ConfigKeyValue { private Boolean isUserDefined; + private Map specialLimit; + + public Map getSpecialLimit() { + return specialLimit; + } + + public void setSpecialLimit(Map specialLimit) { + this.specialLimit = specialLimit; + } + public Boolean getIsUserDefined() { return isUserDefined; } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigUserValue.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigUserValue.java new file mode 100644 index 0000000000..273828ff02 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigUserValue.java @@ -0,0 +1,154 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.configuration.entity; + +public class ConfigUserValue { + + private String key; + + private String name; + // linkis_ps_configuration_config_key id + private Integer configKeyId; + + private String description; + + private String defaultValue; + + private String engineType; + // linkis_ps_configuration_config_value id + private Integer configValueId; + + private String configValue; + // linkis_cg_manager_label id + private Integer configLabelId; + + private String labelValue; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public Integer getConfigKeyId() { + return configKeyId; + } + + public void setConfigKeyId(Integer configKeyId) { + this.configKeyId = configKeyId; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getDefaultValue() { + return defaultValue; + } + + public void setDefaultValue(String defaultValue) { + this.defaultValue = defaultValue; + } + + public String getEngineType() { + return engineType; + } + + public void setEngineType(String engineType) { + this.engineType = engineType; + } + + public Integer getConfigValueId() { + return configValueId; + } + + public void setConfigValueId(Integer configValueId) { + this.configValueId = configValueId; + } + + public String getConfigValue() { + return configValue; + } + + public void setConfigValue(String configValue) { + this.configValue = configValue; + } + + public Integer getConfigLabelId() { + return configLabelId; + } + + public void setConfigLabelId(Integer configLabelId) { + this.configLabelId = configLabelId; + } + + public String getLabelValue() { + return labelValue; + } + + public void setLabelValue(String labelValue) { + this.labelValue = labelValue; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + 
@Override + public String toString() { + return "ConfigUserValue{" + + "key='" + + key + + '\'' + + ", name='" + + name + + '\'' + + ", configKeyId=" + + configKeyId + + ", description='" + + description + + '\'' + + ", defaultValue='" + + defaultValue + + '\'' + + ", engineType='" + + engineType + + '\'' + + ", configValueId=" + + configValueId + + ", configValue='" + + configValue + + '\'' + + ", configLabelId=" + + configLabelId + + ", labelValue='" + + labelValue + + '\'' + + '}'; + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/TemplateConfigKey.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/TemplateConfigKey.java new file mode 100644 index 0000000000..b29b3742f2 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/TemplateConfigKey.java @@ -0,0 +1,200 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.configuration.entity; + +import java.util.Date; + +/** The entity class of the linkis_ps_configuration_template_config_key table @Description */ +public class TemplateConfigKey { + + /** Table field: id Field type: bigint(19) */ + private Long id; + + /** + * Configuration template name redundant storage table field: template_name field type: + * varchar(200) + */ + private String templateName; + + /** + * uuid The template id table field of the third-party record: template_uuid Field type: + * varchar(34) + */ + private String templateUuid; + + /** id of linkis_ps_configuration_config_key table field: key_id field type: bigint(19) */ + private Long keyId; + + /** Configuration value table field: config_value field type: varchar(200) */ + private String configValue; + + /** Upper limit table field: max_value field type: varchar(50) */ + private String maxValue; + + /** Lower limit value (reserved) table field: min_value field type: varchar(50) */ + private String minValue; + + /** Validation regularity (reserved) table field: validate_range field type: varchar(50) */ + private String validateRange; + + /** Is it valid Reserved Y/N table field: is_valid field type: varchar(2) */ + private String isValid; + + /** Creator table field: create_by field type: varchar(50) */ + private String createBy; + + /** + * create time table field: create_time field type: timestamp(19) default value: CURRENT_TIMESTAMP + */ + private Date createTime; + + /** Updater table field: update_by field type: varchar(50) */ + private String updateBy; + + /** + * update time table field: update_time field type: timestamp(19) default value: CURRENT_TIMESTAMP + */ + private Date updateTime; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getTemplateName() { + return templateName; + } + + public void setTemplateName(String templateName) { + this.templateName = templateName; + } + + public String 
getTemplateUuid() { + return templateUuid; + } + + public void setTemplateUuid(String templateUuid) { + this.templateUuid = templateUuid; + } + + public Long getKeyId() { + return keyId; + } + + public void setKeyId(Long keyId) { + this.keyId = keyId; + } + + public String getConfigValue() { + return configValue; + } + + public void setConfigValue(String configValue) { + this.configValue = configValue; + } + + public String getMaxValue() { + return maxValue; + } + + public void setMaxValue(String maxValue) { + this.maxValue = maxValue; + } + + public String getMinValue() { + return minValue; + } + + public void setMinValue(String minValue) { + this.minValue = minValue; + } + + public String getValidateRange() { + return validateRange; + } + + public void setValidateRange(String validateRange) { + this.validateRange = validateRange; + } + + public String getIsValid() { + return isValid; + } + + public void setIsValid(String isValid) { + this.isValid = isValid; + } + + public String getCreateBy() { + return createBy; + } + + public void setCreateBy(String createBy) { + this.createBy = createBy; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public String getUpdateBy() { + return updateBy; + } + + public void setUpdateBy(String updateBy) { + this.updateBy = updateBy; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()); + sb.append(" ["); + sb.append("Hash = ").append(hashCode()); + sb.append(", id=").append(id); + sb.append(", templateName=").append(templateName); + sb.append(", templateUuid=").append(templateUuid); + sb.append(", keyId=").append(keyId); + sb.append(", configValue=").append(configValue); + sb.append(", maxValue=").append(maxValue); 
+ sb.append(", minValue=").append(minValue); + sb.append(", validateRange=").append(validateRange); + sb.append(", isValid=").append(isValid); + sb.append(", createBy=").append(createBy); + sb.append(", createTime=").append(createTime); + sb.append(", updateBy=").append(updateBy); + sb.append(", updateTime=").append(updateTime); + sb.append(']'); + return sb.toString(); + } +} diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/validate/Validator.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/TemplateConfigKeyVO.java similarity index 77% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/validate/Validator.java rename to linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/TemplateConfigKeyVO.java index c86e475248..796a90fa63 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/validate/Validator.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/TemplateConfigKeyVO.java @@ -15,10 +15,17 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.validate; +package org.apache.linkis.configuration.entity; -import org.apache.linkis.cli.common.exception.LinkisClientRuntimeException; +public class TemplateConfigKeyVO extends TemplateConfigKey { -public interface Validator { - void doValidation(Object input) throws LinkisClientRuntimeException; + private String key; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/UserIpVo.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/UserIpVo.java index 05ec8046fd..77d2c67576 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/UserIpVo.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/UserIpVo.java @@ -19,10 +19,12 @@ import java.util.Date; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; @ApiModel +@JsonIgnoreProperties(ignoreUnknown = true) public class UserIpVo { @ApiModelProperty("id") diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/PresentModeImpl.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/enumeration/BoundaryTypeEnum.java similarity index 72% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/PresentModeImpl.java rename to linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/enumeration/BoundaryTypeEnum.java index aee032e3a4..79bff7cae9 100644 --- 
a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/src/main/java/org/apache/linkis/cli/core/present/PresentModeImpl.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/enumeration/BoundaryTypeEnum.java @@ -15,22 +15,27 @@ * limitations under the License. */ -package org.apache.linkis.cli.core.present; +package org.apache.linkis.configuration.enumeration; -public enum PresentModeImpl implements PresentMode { - STDOUT("stdout", 0), - TEXT_FILE("text_file", 1); +public enum BoundaryTypeEnum { + /* + 0 none + 1 with min + 2 with max + 3 min and max both + */ + NONE(0), + WITH_MIX(1), + WITH_MAX(2), + WITH_BOTH(3); - private String name; - private int id; + private Integer id; - PresentModeImpl(String name, int id) { - this.name = name; + BoundaryTypeEnum(Integer id) { this.id = id; } - @Override - public String getName() { - return this.name(); + public Integer getId() { + return this.id; } } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/errorcode/LinkisConfigurationErrorCodeSummary.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/errorcode/LinkisConfigurationErrorCodeSummary.java index 3f58930ea1..e00b1499ea 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/errorcode/LinkisConfigurationErrorCodeSummary.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/errorcode/LinkisConfigurationErrorCodeSummary.java @@ -41,7 +41,7 @@ public enum LinkisConfigurationErrorCodeSummary implements LinkisErrorCode { 14100, "The saved engine type parameter is incorrect, please send it in a fixed format, such as spark-2.4.3(保存的引擎类型参数有误,请按照固定格式传送,例如spark-2.4.3)"), INCOMPLETE_RECONFIRM(14100, "Incomplete request parameters, please reconfirm(请求参数不完整,请重新确认)"), - ONLY_ADMIN_CAN_MODIFY(14100, "Only 
admin can modify category(只有管理员才能修改目录)"), + ONLY_ADMIN_PERFORM(14100, "Only admin have permission to perform this operation(限管理员执行此操作)"), THE_LABEL_PARAMETER_IS_EMPTY(14100, " The label parameter is empty(标签参数为空)"), ERROR_VALIDATOR_RANGE(14100, "Error validator range!(错误验证器范围!)"), TYPE_OF_LABEL_NOT_SUPPORTED(14100, "This type of label is not supported:{0}(不支持这种类型的标签:{0})"); diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/AcrossClusterRuleRestfulApi.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/AcrossClusterRuleRestfulApi.java new file mode 100644 index 0000000000..a0ae390576 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/AcrossClusterRuleRestfulApi.java @@ -0,0 +1,327 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.configuration.restful.api; + +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.configuration.entity.AcrossClusterRule; +import org.apache.linkis.configuration.service.AcrossClusterRuleService; +import org.apache.linkis.configuration.util.CommonUtils; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.utils.ModuleUserUtils; + +import org.apache.commons.lang3.StringUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.*; + +import javax.servlet.http.HttpServletRequest; + +import java.util.Map; + +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Api(tags = "across cluster rule api") +@RestController +@RequestMapping(path = "/configuration/acrossClusterRule") +public class AcrossClusterRuleRestfulApi { + + @Autowired private AcrossClusterRuleService acrossClusterRuleService; + + private Logger log = LoggerFactory.getLogger(this.getClass()); + + @ApiOperation( + value = "valid acrossClusterRule", + notes = "valid acrossClusterRule", + response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam(name = "req", dataType = "HttpServletRequest", value = "req"), + @ApiImplicitParam(name = "id", dataType = "Integer", value = "id"), + @ApiImplicitParam(name = "isValid", dataType = "String", value = "isValid"), + }) + @RequestMapping(path = "/isValid", method = RequestMethod.PUT) + public Message isValidRule(HttpServletRequest req, @RequestBody Map json) { + String username = ModuleUserUtils.getOperationUser(req, "execute valid acrossClusterRule"); + if (!Configuration.isAdmin(username)) { + return Message.error( + "Failed to valid acrossClusterRule List,msg: only administrators can configure"); + } + + Integer idInt = (Integer) 
json.get("id"); + Long id = idInt.longValue(); + String isValid = (String) json.get("isValid"); + + if (StringUtils.isBlank(isValid)) { + return Message.error("Failed to valid acrossClusterRule: Illegal Input Param"); + } + try { + acrossClusterRuleService.validAcrossClusterRule(id, isValid, username); + } catch (Exception e) { + return Message.error("valid acrossClusterRule failed:" + e.getMessage()); + } + + return Message.ok(); + } + + @ApiOperation( + value = "query acrossClusterRule list", + notes = "query acrossClusterRule list", + response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam(name = "req", dataType = "HttpServletRequest", value = "req"), + @ApiImplicitParam(name = "creator", dataType = "String", value = "creator"), + @ApiImplicitParam(name = "user", dataType = "String", value = "user"), + @ApiImplicitParam(name = "clusterName", dataType = "String", value = "clusterName"), + }) + @RequestMapping(path = "/list", method = RequestMethod.GET) + public Message queryAcrossClusterRuleList( + HttpServletRequest req, + @RequestParam(value = "creator", required = false) String creator, + @RequestParam(value = "user", required = false) String user, + @RequestParam(value = "clusterName", required = false) String clusterName, + @RequestParam(value = "pageNow", required = false) Integer pageNow, + @RequestParam(value = "pageSize", required = false) Integer pageSize) { + String username = ModuleUserUtils.getOperationUser(req, "execute query acrossClusterRule List"); + if (!Configuration.isAdmin(username)) { + return Message.error( + "Failed to query acrossClusterRule List,msg: only administrators can configure"); + } + + if (StringUtils.isBlank(user)) user = null; + if (StringUtils.isBlank(creator)) creator = null; + if (StringUtils.isBlank(clusterName)) clusterName = null; + if (null == pageNow) pageNow = 1; + if (null == pageSize) pageSize = 20; + + Map resultMap = null; + try { + resultMap = + acrossClusterRuleService.queryAcrossClusterRuleList( + 
creator, user, clusterName, pageNow, pageSize); + } catch (Exception e) { + log.info("query acrossClusterRule List failed:" + e.getMessage()); + return Message.error("query acrossClusterRule List failed"); + } + + Message msg = Message.ok(); + msg.getData().putAll(resultMap); + return msg; + } + + @ApiOperation( + value = "delete acrossClusterRule", + notes = "delete acrossClusterRule", + response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam(name = "req", dataType = "HttpServletRequest", value = "req"), + @ApiImplicitParam(name = "creator", dataType = "String", value = "creator"), + @ApiImplicitParam(name = "user", dataType = "String", value = "user"), + }) + @RequestMapping(path = "/delete", method = RequestMethod.DELETE) + public Message deleteAcrossClusterRule( + HttpServletRequest req, + @RequestParam(value = "creator", required = false) String creator, + @RequestParam(value = "user", required = false) String user) { + String username = ModuleUserUtils.getOperationUser(req, "execute delete acrossClusterRule"); + if (!Configuration.isAdmin(username)) { + return Message.error( + "Failed to delete acrossClusterRule,msg: only administrators can configure"); + } + + if (StringUtils.isBlank(creator) || StringUtils.isBlank(user)) { + return Message.error("Failed to delete acrossClusterRule: Illegal Input Param"); + } + + try { + acrossClusterRuleService.deleteAcrossClusterRule(creator, user); + } catch (Exception e) { + log.info("delete acrossClusterRule failed:" + e.getMessage()); + return Message.error("delete acrossClusterRule failed"); + } + + return Message.ok(); + } + + @ApiOperation( + value = "update acrossClusterRule", + notes = "update acrossClusterRule ", + response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam(name = "req", dataType = "HttpServletRequest", value = "req"), + @ApiImplicitParam(name = "id", dataType = "Integer", value = "id"), + @ApiImplicitParam(name = "clusterName", dataType = "String", value = "clusterName"), + 
@ApiImplicitParam(name = "creator", dataType = "String", value = "creator"), + @ApiImplicitParam(name = "user", dataType = "String", value = "user"), + @ApiImplicitParam(name = "isValid", dataType = "String", value = "isValid"), + @ApiImplicitParam(name = "startTime", dataType = "String", value = "startTime"), + @ApiImplicitParam(name = "endTime", dataType = "String", value = "endTime"), + @ApiImplicitParam(name = "CPUThreshold", dataType = "String", value = "CPUThreshold"), + @ApiImplicitParam(name = "MemoryThreshold", dataType = "String", value = "MemoryThreshold"), + @ApiImplicitParam( + name = "CPUPercentageThreshold", + dataType = "String", + value = "CPUPercentageThreshold"), + @ApiImplicitParam( + name = "MemoryPercentageThreshold", + dataType = "String", + value = "MemoryPercentageThreshold"), + }) + @RequestMapping(path = "/update", method = RequestMethod.PUT) + public Message updateAcrossClusterRule( + HttpServletRequest req, @RequestBody Map json) { + String username = ModuleUserUtils.getOperationUser(req, "execute update acrossClusterRule"); + if (!Configuration.isAdmin(username)) { + return Message.error( + "Failed to update acrossClusterRule,msg: only administrators can configure"); + } + + Integer idInt = (Integer) json.get("id"); + Long id = idInt.longValue(); + String clusterName = (String) json.get("clusterName"); + String creator = (String) json.get("creator"); + String user = (String) json.get("user"); + String isValid = (String) json.get("isValid"); + String startTime = (String) json.get("startTime"); + String endTime = (String) json.get("endTime"); + String CPUThreshold = (String) json.get("CPUThreshold"); + String MemoryThreshold = (String) json.get("MemoryThreshold"); + String CPUPercentageThreshold = (String) json.get("CPUPercentageThreshold"); + String MemoryPercentageThreshold = (String) json.get("MemoryPercentageThreshold"); + if (StringUtils.isBlank(clusterName) + || StringUtils.isBlank(creator) + || StringUtils.isBlank(user) + || 
StringUtils.isBlank(isValid) + || StringUtils.isBlank(startTime) + || StringUtils.isBlank(endTime) + || StringUtils.isBlank(CPUThreshold) + || StringUtils.isBlank(MemoryThreshold) + || StringUtils.isBlank(CPUPercentageThreshold) + || StringUtils.isBlank(MemoryPercentageThreshold)) { + return Message.error("Failed to add acrossClusterRule: Illegal Input Param"); + } + + try { + String rules = + CommonUtils.ruleMap2String( + startTime, + endTime, + CPUThreshold, + MemoryThreshold, + CPUPercentageThreshold, + MemoryPercentageThreshold); + AcrossClusterRule acrossClusterRule = new AcrossClusterRule(); + acrossClusterRule.setId(id); + acrossClusterRule.setClusterName(clusterName.toLowerCase()); + acrossClusterRule.setCreator(creator); + acrossClusterRule.setUser(user); + acrossClusterRule.setUpdateBy(username); + acrossClusterRule.setRules(rules); + acrossClusterRule.setIsValid(isValid); + acrossClusterRuleService.updateAcrossClusterRule(acrossClusterRule); + } catch (Exception e) { + log.info("update acrossClusterRule failed:" + e.getMessage()); + return Message.error("update acrossClusterRule failed:history already exist"); + } + return Message.ok(); + } + + @ApiOperation( + value = "add acrossClusterRule", + notes = "add acrossClusterRule ", + response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam(name = "req", dataType = "HttpServletRequest", value = "req"), + @ApiImplicitParam(name = "clusterName", dataType = "String", value = "clusterName"), + @ApiImplicitParam(name = "creator", dataType = "String", value = "creator"), + @ApiImplicitParam(name = "user", dataType = "String", value = "user"), + @ApiImplicitParam(name = "isValid", dataType = "String", value = "isValid"), + @ApiImplicitParam(name = "startTime", dataType = "String", value = "startTime"), + @ApiImplicitParam(name = "endTime", dataType = "String", value = "endTime"), + @ApiImplicitParam(name = "CPUThreshold", dataType = "String", value = "CPUThreshold"), + @ApiImplicitParam(name = 
"MemoryThreshold", dataType = "String", value = "MemoryThreshold"), + @ApiImplicitParam( + name = "CPUPercentageThreshold", + dataType = "String", + value = "CPUPercentageThreshold"), + @ApiImplicitParam( + name = "MemoryPercentageThreshold", + dataType = "String", + value = "MemoryPercentageThreshold"), + }) + @RequestMapping(path = "/add", method = RequestMethod.POST) + public Message insertAcrossClusterRule( + HttpServletRequest req, @RequestBody Map json) { + String username = ModuleUserUtils.getOperationUser(req, "execute add acrossClusterRule"); + if (!Configuration.isAdmin(username)) { + return Message.error( + "Failed to add acrossClusterRule,msg: only administrators can configure"); + } + + String clusterName = (String) json.get("clusterName"); + String creator = (String) json.get("creator"); + String user = (String) json.get("user"); + String isValid = (String) json.get("isValid"); + String startTime = (String) json.get("startTime"); + String endTime = (String) json.get("endTime"); + String CPUThreshold = (String) json.get("CPUThreshold"); + String MemoryThreshold = (String) json.get("MemoryThreshold"); + String CPUPercentageThreshold = (String) json.get("CPUPercentageThreshold"); + String MemoryPercentageThreshold = (String) json.get("MemoryPercentageThreshold"); + if (StringUtils.isBlank(clusterName) + || StringUtils.isBlank(creator) + || StringUtils.isBlank(user) + || StringUtils.isBlank(isValid) + || StringUtils.isBlank(startTime) + || StringUtils.isBlank(endTime) + || StringUtils.isBlank(CPUThreshold) + || StringUtils.isBlank(MemoryThreshold) + || StringUtils.isBlank(CPUPercentageThreshold) + || StringUtils.isBlank(MemoryPercentageThreshold)) { + return Message.error("Failed to add acrossClusterRule: Illegal Input Param"); + } + + try { + String rules = + CommonUtils.ruleMap2String( + startTime, + endTime, + CPUThreshold, + MemoryThreshold, + CPUPercentageThreshold, + MemoryPercentageThreshold); + AcrossClusterRule acrossClusterRule = new 
AcrossClusterRule(); + acrossClusterRule.setClusterName(clusterName.toLowerCase()); + acrossClusterRule.setCreator(creator); + acrossClusterRule.setUser(user); + acrossClusterRule.setCreateBy(username); + acrossClusterRule.setUpdateBy(username); + acrossClusterRule.setRules(rules); + acrossClusterRule.setIsValid(isValid); + acrossClusterRuleService.insertAcrossClusterRule(acrossClusterRule); + } catch (Exception e) { + log.info("add acrossClusterRule failed:" + e.getMessage()); + return Message.error("add acrossClusterRule failed:history already exist"); + } + + return Message.ok(); + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApi.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApi.java index dd4bbc258b..d2847a6d11 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApi.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApi.java @@ -26,6 +26,7 @@ import org.apache.linkis.configuration.util.ConfigurationConfiguration; import org.apache.linkis.configuration.util.JsonNodeUtil; import org.apache.linkis.configuration.util.LabelEntityParser; +import org.apache.linkis.configuration.validate.ValidatorManager; import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; import org.apache.linkis.manager.label.entity.engine.UserCreatorLabel; import org.apache.linkis.manager.label.utils.LabelUtils; @@ -33,6 +34,7 @@ import org.apache.linkis.server.Message; import org.apache.linkis.server.utils.ModuleUserUtils; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.springframework.beans.factory.annotation.Autowired; @@ -42,12 +44,12 @@ import 
java.io.IOException; import java.text.MessageFormat; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; +import java.util.*; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; +import com.github.pagehelper.PageHelper; +import com.github.pagehelper.PageInfo; import com.github.xiaoymin.knife4j.annotations.ApiOperationSupport; import io.swagger.annotations.Api; import io.swagger.annotations.ApiImplicitParam; @@ -71,6 +73,8 @@ public class ConfigurationRestfulApi { @Autowired private ConfigKeyService configKeyService; + @Autowired private ValidatorManager validatorManager; + ObjectMapper mapper = new ObjectMapper(); private static final String NULL = "null"; @@ -144,6 +148,7 @@ public Message getFullTreesByAppName( ArrayList configTrees = configurationService.getFullTreeByLabelList( labelList, true, req.getHeader("Content-Language")); + return Message.ok().data("fullTree", configTrees); } @@ -152,9 +157,49 @@ public Message getFullTreesByAppName( public Message getCategory(HttpServletRequest req) { List categoryLabelList = categoryService.getAllCategory(req.getHeader("Content-Language")); + return Message.ok().data("Category", categoryLabelList); } + @ApiOperation( + value = "getItemList", + notes = "get configuration list by engineType", + response = Message.class) + @RequestMapping(path = "/getItemList", method = RequestMethod.GET) + public Message getItemList( + HttpServletRequest req, @RequestParam(value = "engineType") String engineType) + throws ConfigurationException { + String userName = + ModuleUserUtils.getOperationUser(req, "getItemList with engineType:" + engineType); + // Adding * represents returning all configuration information + if ("*".equals(engineType)) { + engineType = null; + } + List result = configKeyService.getConfigKeyList(engineType); + List filterResult = new ArrayList<>(); + for (ConfigKey configKey : result) { + Map temp = new HashMap(); + temp.put("key", 
configKey.getKey()); + temp.put("name", configKey.getName()); + temp.put("description", configKey.getDescription()); + temp.put("engineType", configKey.getEngineType()); + temp.put("validateType", configKey.getValidateType()); + temp.put("validateRange", configKey.getValidateRange()); + temp.put("boundaryType", configKey.getBoundaryType()); + temp.put("defaultValue", configKey.getDefaultValue()); + // for front-end to judge whether input is required + if (StringUtils.isNotEmpty(configKey.getDefaultValue())) { + temp.put("require", "true"); + } else { + temp.put("require", "false"); + } + + filterResult.add(temp); + } + + return Message.ok().data("itemList", filterResult); + } + @ApiOperation( value = "createFirstCategory", notes = "create first category", @@ -275,9 +320,39 @@ public Message saveFullTree(HttpServletRequest req, @RequestBody JsonNode json) version = tmpString[1]; } configurationService.updateUserValue(createList, updateList); - configurationService.clearAMCacheConf(username, creator, engine, version); - Message message = Message.ok(); - return message; + // TODO: Add a refresh cache interface later + if (StringUtils.isNotBlank(creator) && creator.equals("*")) { + List allCategory = categoryService.getAllCategory(null); + List categoryLabelVos = + allCategory.stream() + .filter(s -> s.getCategoryName().equals(Configuration.REMOVE_APPLICATION_CACHE())) + .map(CategoryLabelVo::getChildCategory) + .findFirst() + .get(); + categoryLabelVos.stream() + .map(CategoryLabelVo::getCategoryName) + .filter(StringUtils::isNotBlank) + .forEach( + info -> { + String[] tmpString = info.split("-"); + if (tmpString.length == 2) { + String engineName = tmpString[0]; + String engineVersion = tmpString[1]; + logger.info( + "Config remove engine cache:engineName:{},engineVersion:{}", + engineName, + engineVersion); + configurationService.clearAMCacheConf( + username, + Configuration.REMOVE_APPLICATION_CACHE(), + engineName, + engineVersion); + } + }); + } else { + 
configurationService.clearAMCacheConf(username, creator, engine, version); + } + return Message.ok(); } @ApiOperation( @@ -346,7 +421,7 @@ public Message rpcTest( private void checkAdmin(String userName) throws ConfigurationException { if (!org.apache.linkis.common.conf.Configuration.isAdmin(userName)) { - throw new ConfigurationException(ONLY_ADMIN_CAN_MODIFY.getErrorDesc()); + throw new ConfigurationException(ONLY_ADMIN_PERFORM.getErrorDesc()); } } @@ -394,18 +469,27 @@ public Message getKeyValue( @RequestMapping(path = "/keyvalue", method = RequestMethod.POST) public Message saveKeyValue(HttpServletRequest req, @RequestBody Map json) throws ConfigurationException { + Message message = Message.ok(); String username = ModuleUserUtils.getOperationUser(req, "saveKey"); String engineType = (String) json.getOrDefault("engineType", "*"); + String user = (String) json.getOrDefault("user", ""); String version = (String) json.getOrDefault("version", "*"); String creator = (String) json.getOrDefault("creator", "*"); String configKey = (String) json.get("configKey"); String value = (String) json.get("configValue"); + boolean force = Boolean.parseBoolean(json.getOrDefault("force", "false").toString()); + if (!org.apache.linkis.common.conf.Configuration.isAdmin(username) && !username.equals(user)) { + return Message.error("Only admin can modify other user configuration data"); + } if (engineType.equals("*") && !version.equals("*")) { return Message.error("When engineType is any engine, the version must also be any version"); } if (StringUtils.isBlank(configKey) || StringUtils.isBlank(value)) { return Message.error("key or value cannot be empty"); } + if (StringUtils.isNotBlank(user)) { + username = user; + } List labelList = LabelEntityParser.generateUserCreatorEngineTypeLabelList( username, creator, engineType, version); @@ -414,9 +498,18 @@ public Message saveKeyValue(HttpServletRequest req, @RequestBody Map configValues = configKeyService.deleteConfigValue(configKey, 
labelList); return Message.ok().data("configValues", configValues); } + + @ApiOperation(value = "getBaseKeyValue", notes = "get key", response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam( + name = "engineType", + required = false, + dataType = "String", + value = "engineType"), + @ApiImplicitParam(name = "key", required = false, dataType = "String", value = "key"), + @ApiImplicitParam(name = "pageNow", required = false, dataType = "Integer", defaultValue = "1"), + @ApiImplicitParam( + name = "pageSize", + required = false, + dataType = "Integer", + defaultValue = "20"), + }) + @RequestMapping(path = "/baseKeyValue", method = RequestMethod.GET) + public Message getBaseKeyValue( + HttpServletRequest req, + @RequestParam(value = "engineType", required = false) String engineType, + @RequestParam(value = "key", required = false) String key, + @RequestParam(value = "pageNow", required = false, defaultValue = "1") Integer pageNow, + @RequestParam(value = "pageSize", required = false, defaultValue = "20") Integer pageSize) + throws ConfigurationException { + checkAdmin(ModuleUserUtils.getOperationUser(req, "getBaseKeyValue")); + if (StringUtils.isBlank(engineType)) { + engineType = null; + } + if (StringUtils.isBlank(key)) { + key = null; + } + PageHelper.startPage(pageNow, pageSize); + List list = null; + try { + list = configKeyService.getConfigBykey(engineType, key, req.getHeader("Content-Language")); + } finally { + PageHelper.clearPage(); + } + PageInfo pageInfo = new PageInfo<>(list); + long total = pageInfo.getTotal(); + return Message.ok().data("configKeyList", list).data("totalPage", total); + } + + @ApiOperation(value = "deleteBaseKeyValue", notes = "delete key", response = Message.class) + @ApiImplicitParams({@ApiImplicitParam(name = "id", required = true, dataType = "Integer")}) + @RequestMapping(path = "/baseKeyValue", method = RequestMethod.DELETE) + public Message deleteBaseKeyValue(HttpServletRequest req, @RequestParam(value = "id") Integer 
id) + throws ConfigurationException { + checkAdmin(ModuleUserUtils.getOperationUser(req, "deleteBaseKeyValue ID:" + id)); + configKeyService.deleteConfigById(id); + return Message.ok(); + } + + @ApiOperation(value = "saveBaseKeyValue", notes = "save key", response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam(name = "id", required = false, dataType = "Integer", value = "id"), + @ApiImplicitParam(name = "key", required = true, dataType = "String", value = "key"), + @ApiImplicitParam(name = "name", required = true, dataType = "String", value = "name"), + @ApiImplicitParam( + name = "description", + required = true, + dataType = "String", + value = "description"), + @ApiImplicitParam( + name = "defaultValue", + required = true, + dataType = "String", + value = "defaultValue"), + @ApiImplicitParam( + name = "validateType", + required = true, + dataType = "String", + value = "validateType"), + @ApiImplicitParam( + name = "validateRange", + required = true, + dataType = "String", + value = "validateRange"), + @ApiImplicitParam( + name = "boundaryType", + required = true, + dataType = "String", + value = "boundaryType"), + @ApiImplicitParam(name = "treeName", required = true, dataType = "String", value = "treeName"), + @ApiImplicitParam( + name = "engineType", + required = true, + dataType = "String", + value = "engineType"), + @ApiImplicitParam(name = "enName", required = false, dataType = "String", value = "enName"), + @ApiImplicitParam( + name = "enDescription", + required = false, + dataType = "String", + value = "enDescription"), + @ApiImplicitParam( + name = "enTreeName", + required = false, + dataType = "String", + value = "enTreeName") + }) + @ApiOperationSupport(ignoreParameters = {"json"}) + @RequestMapping(path = "/baseKeyValue", method = RequestMethod.POST) + public Message saveBaseKeyValue(HttpServletRequest req, @RequestBody ConfigKey configKey) + throws ConfigurationException, InstantiationException, IllegalAccessException { + 
checkAdmin(ModuleUserUtils.getOperationUser(req, "saveBaseKeyValue")); + String key = configKey.getKey(); + String name = configKey.getName(); + String treeName = configKey.getTreeName(); + String description = configKey.getDescription(); + Integer boundaryType = configKey.getBoundaryType(); + String defaultValue = configKey.getDefaultValue(); + String validateType = configKey.getValidateType(); + String validateRange = configKey.getValidateRange(); + String engineType = configKey.getEngineType(); + if (StringUtils.isBlank(key)) { + return Message.error("key cannot be empty"); + } + if (StringUtils.isBlank(name)) { + return Message.error("name cannot be empty"); + } + if (StringUtils.isBlank(description)) { + return Message.error("description cannot be empty"); + } + if (StringUtils.isBlank(treeName)) { + return Message.error("treeName cannot be empty"); + } + if (StringUtils.isBlank(validateType)) { + return Message.error("validateType cannot be empty"); + } + if (!validateType.equals("None") && StringUtils.isBlank(validateRange)) { + return Message.error("validateRange cannot be empty"); + } + if (null == boundaryType) { + return Message.error("boundaryType cannot be empty"); + } + if (StringUtils.isNotEmpty(defaultValue) + && !validatorManager + .getOrCreateValidator(validateType) + .validate(defaultValue, validateRange)) { + String msg = + MessageFormat.format( + "Parameter configValue verification failed(参数defaultValue校验失败):" + + "key:{0}, ValidateType:{1}, ValidateRange:{2},ConfigValue:{3}", + key, validateType, validateRange, defaultValue); + throw new ConfigurationException(msg); + } + if (null == configKey.getId()) { + List configBykey = + configKeyService.getConfigBykey(engineType, key, req.getHeader("Content-Language")); + if (CollectionUtils.isNotEmpty(configBykey)) { + return Message.error("The engine has the same key: " + key); + } + configKeyService.saveConfigKey(configKey); + } else { + configKey.setId(configKey.getId()); + 
configKeyService.updateConfigKey(configKey); + } + return Message.ok().data("configKey", configKey); + } + + @ApiOperation(value = "getUserkeyvalue", notes = "get key", response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam( + name = "engineType", + required = false, + dataType = "String", + value = "engineType"), + @ApiImplicitParam(name = "key", required = false, dataType = "String", value = "key"), + @ApiImplicitParam(name = "creator", required = false, dataType = "String", value = "creator"), + @ApiImplicitParam(name = "user", required = false, dataType = "String", value = "user"), + @ApiImplicitParam(name = "pageNow", required = false, dataType = "Integer", defaultValue = "1"), + @ApiImplicitParam( + name = "pageSize", + required = false, + dataType = "Integer", + defaultValue = "20"), + }) + @RequestMapping(path = "/userKeyValue", method = RequestMethod.GET) + public Message getUserKeyValue( + HttpServletRequest req, + @RequestParam(value = "engineType", required = false) String engineType, + @RequestParam(value = "key", required = false) String key, + @RequestParam(value = "creator", required = false) String creator, + @RequestParam(value = "user", required = false) String user, + @RequestParam(value = "pageNow", required = false, defaultValue = "1") Integer pageNow, + @RequestParam(value = "pageSize", required = false, defaultValue = "20") Integer pageSize) + throws ConfigurationException { + checkAdmin(ModuleUserUtils.getOperationUser(req, "getUserKeyValue")); + if (StringUtils.isBlank(engineType)) { + engineType = null; + } + if (StringUtils.isBlank(key)) { + key = null; + } + if (StringUtils.isBlank(creator)) { + creator = null; + } + if (StringUtils.isBlank(user)) { + user = null; + } + PageHelper.startPage(pageNow, pageSize); + List list; + try { + list = configKeyService.getUserConfigValue(engineType, key, creator, user); + } finally { + PageHelper.clearPage(); + } + PageInfo pageInfo = new PageInfo<>(list); + long total = 
pageInfo.getTotal(); + return Message.ok().data("configValueList", list).data("totalPage", total); + } } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/ConfigurationTemplateRestfulApi.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/ConfigurationTemplateRestfulApi.java new file mode 100644 index 0000000000..75355c0e07 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/ConfigurationTemplateRestfulApi.java @@ -0,0 +1,278 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.configuration.restful.api; + +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.common.utils.JsonUtils; +import org.apache.linkis.configuration.entity.ConfigKeyLimitVo; +import org.apache.linkis.configuration.exception.ConfigurationException; +import org.apache.linkis.configuration.service.TemplateConfigKeyService; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.utils.ModuleUserUtils; + +import org.apache.commons.lang3.StringUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +import javax.servlet.http.HttpServletRequest; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.JsonNode; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Api(tags = "configuration template") +@RestController +@RequestMapping(path = "/configuration/template") +public class ConfigurationTemplateRestfulApi { + + private static final Logger logger = + LoggerFactory.getLogger(ConfigurationTemplateRestfulApi.class); + + @Autowired private TemplateConfigKeyService templateConfigKeyService; + + @ApiOperation( + value = "updateKeyMapping", + notes = "query engineconn info list", + response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam( + name = "templateUid", + dataType = "String", + required = true, + value = "templateUid"), + @ApiImplicitParam( + name = 
"templateName", + dataType = "String", + required = true, + value = "engine type"), + @ApiImplicitParam(name = "engineType", dataType = "String", required = true, value = "String"), + @ApiImplicitParam(name = "operator", dataType = "String", value = "operator"), + @ApiImplicitParam(name = "isFullMode", dataType = "Boolbean", value = "isFullMode"), + @ApiImplicitParam(name = "itemList", dataType = "Array", value = "itemList"), + }) + @RequestMapping(path = "/updateKeyMapping", method = RequestMethod.POST) + public Message updateKeyMapping(HttpServletRequest req, @RequestBody JsonNode jsonNode) + throws ConfigurationException { + String username = ModuleUserUtils.getOperationUser(req, "updateKeyMapping"); + String token = ModuleUserUtils.getToken(req); + // check special admin token + if (StringUtils.isNotBlank(token)) { + if (!Configuration.isAdminToken(token)) { + logger.warn("Token:{} has no permission to updateKeyMapping.", token); + return Message.error("Token:" + token + " has no permission to updateKeyMapping."); + } + } else if (!Configuration.isAdmin(username)) { + logger.warn("User:{} has no permission to updateKeyMapping.", username); + return Message.error("User:" + username + " has no permission to updateKeyMapping."); + } + + String templateUid = jsonNode.get("templateUid").asText(); + String templateName = jsonNode.get("templateName").asText(); + String engineType = jsonNode.get("engineType").asText(); + String operator = jsonNode.get("operator").asText(); + + if (StringUtils.isBlank(templateUid)) { + return Message.error("parameters:templateUid can not be empty(请求参数【templateUid】不能为空)"); + } + if (StringUtils.isBlank(templateName)) { + return Message.error("parameters:templateName can not be empty(请求参数【templateName】不能为空)"); + } + if (StringUtils.isBlank(engineType)) { + return Message.error("parameters:engineType can not be empty(请求参数【engineType】不能为空)"); + } + if (StringUtils.isBlank(operator)) { + return Message.error("parameters:operator can not be 
empty(请求参数【operator】不能为空)"); + } + boolean isFullMode = true; + try { + isFullMode = jsonNode.get("isFullMode").asBoolean(); + logger.info("will update by param isFullMode:" + isFullMode); + } catch (Exception e) { + logger.info("will update by default isFullMode:" + isFullMode); + } + + JsonNode itemParms = jsonNode.get("itemList"); + + List confKeyList = new ArrayList<>(); + if (itemParms != null && !itemParms.isNull()) { + try { + confKeyList = + JsonUtils.jackson() + .readValue(itemParms.toString(), new TypeReference>() {}); + } catch (JsonProcessingException e) { + return Message.error( + "parameters:itemList parsing failed(请求参数【itemList】解析失败), error with:" + e.getMessage()); + } + } else { + return Message.error("parameters:itemList can not be empty(请求参数【itemList】不能为空)"); + } + + logger.info( + "request parameters templateUid:{}, templateName:{}, engineType:{}, operator:{},isFullMode:{}, itemList:[{}]", + templateUid, + templateName, + engineType, + operator, + itemParms.asText()); + + templateConfigKeyService.updateKeyMapping( + templateUid, templateName, engineType, operator, confKeyList, isFullMode); + return Message.ok(); + } + + @ApiOperation(value = "queryKeyInfoList", notes = "query key info list", response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam(name = "templateUidList", dataType = "Array", value = "templateUidList"), + }) + @RequestMapping(path = "/queryKeyInfoList", method = RequestMethod.POST) + public Message queryKeyInfoList(HttpServletRequest req, @RequestBody JsonNode jsonNode) + throws ConfigurationException { + String username = ModuleUserUtils.getOperationUser(req, "queryKeyInfoList"); + String token = ModuleUserUtils.getToken(req); + // check special admin token + if (StringUtils.isNotBlank(token)) { + if (!Configuration.isAdminToken(token)) { + logger.warn("Token:{} has no permission to queryKeyInfoList.", token); + return Message.error("Token:" + token + " has no permission to queryKeyInfoList."); + } + } else if 
(!Configuration.isAdmin(username)) { + logger.warn("User:{} has no permission to queryKeyInfoList.", username); + return Message.error("User:" + username + " has no permission to queryKeyInfoList."); + } + + JsonNode templateUidListParms = jsonNode.get("templateUidList"); + + List uuidList = new ArrayList<>(); + if (templateUidListParms != null && !templateUidListParms.isNull()) { + try { + uuidList = + JsonUtils.jackson() + .readValue(templateUidListParms.toString(), new TypeReference>() {}); + } catch (JsonProcessingException e) { + return Message.error( + "parameters:templateUidList parsing failed(请求参数【templateUidList】解析失败), error with:" + + e.getMessage()); + } + } else { + return Message.error( + "parameters:templateUidList can not be empty(请求参数【templateUidList】不能为空)"); + } + + List result = templateConfigKeyService.queryKeyInfoList(uuidList); + + return Message.ok().data("list", result); + } + + @ApiOperation(value = "apply", notes = "apply conf template rule", response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam( + name = "templateUid", + dataType = "String", + required = true, + value = "templateUid"), + @ApiImplicitParam(name = "application", dataType = "String", value = "application"), + @ApiImplicitParam(name = "engineType", dataType = "String", value = "engineType"), + @ApiImplicitParam(name = "engineVersion", dataType = "String", value = "engineVersion"), + @ApiImplicitParam(name = "operator", dataType = "String", value = "operator"), + @ApiImplicitParam(name = "userList", dataType = "Array", value = "userList"), + }) + @RequestMapping(path = "/apply", method = RequestMethod.POST) + public Message apply(HttpServletRequest req, @RequestBody JsonNode jsonNode) + throws ConfigurationException { + String username = ModuleUserUtils.getOperationUser(req, "apply"); + String token = ModuleUserUtils.getToken(req); + // check special admin token + if (StringUtils.isNotBlank(token)) { + if (!Configuration.isAdminToken(token)) { + 
logger.warn("Token:{} has no permission to apply.", token); + return Message.error("Token:" + token + " has no permission to apply."); + } + } else if (!Configuration.isAdmin(username)) { + logger.warn("User:{} has no permission to apply.", username); + return Message.error("User:" + username + " has no permission to apply."); + } + + String templateUid = jsonNode.get("templateUid").asText(); + String application = jsonNode.get("application").asText(); + String engineType = jsonNode.get("engineType").asText(); + String engineVersion = jsonNode.get("engineVersion").asText(); + String operator = jsonNode.get("operator").asText(); + + if (StringUtils.isBlank(templateUid)) { + return Message.error("parameters:templateUid can not be empty(请求参数【templateUid】不能为空)"); + } + if (StringUtils.isBlank(application)) { + return Message.error("parameters:application can not be empty(请求参数【application】不能为空)"); + } + if (StringUtils.isBlank(engineType)) { + return Message.error("parameters:engineType can not be empty(请求参数【engineType】不能为空)"); + } + if (StringUtils.isBlank(engineVersion)) { + return Message.error("parameters:engineVersion can not be empty(请求参数【engineVersion】不能为空)"); + } + if (StringUtils.isBlank(operator)) { + return Message.error("parameters:operator can not be empty(请求参数【operator】不能为空)"); + } + + JsonNode userParms = jsonNode.get("userList"); + List userList = new ArrayList<>(); + if (userParms != null && !userParms.isNull()) { + try { + userList = + JsonUtils.jackson() + .readValue(userParms.toString(), new TypeReference>() {}); + } catch (JsonProcessingException e) { + return Message.error( + "parameters:userList parsing failed(请求参数【userList】解析失败), error with:" + e.getMessage()); + } + } else { + return Message.error("parameters:userList can not be empty(请求参数【userList】不能为空)"); + } + + logger.info( + "request parameters templateUid:{}, application:{}, engineType:{}, engineVersion:{}, operator:{},userList:[{}]", + templateUid, + application, + engineType, + 
engineVersion, + operator, + String.join(",", userList)); + + Map result = + templateConfigKeyService.apply( + templateUid, application, engineType, engineVersion, operator, userList); + + Message message = Message.ok(); + message.getData().putAll(result); + return message; + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/TenantConfigrationRestfulApi.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/TenantConfigrationRestfulApi.java index e399849be3..9a41fe67cc 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/TenantConfigrationRestfulApi.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/TenantConfigrationRestfulApi.java @@ -75,7 +75,7 @@ public Message createTenant(HttpServletRequest req, @RequestBody TenantVo tenant if (!Configuration.isAdmin(userName)) { return Message.error("Failed to create-tenant,msg: only administrators can configure"); } - if (tenantConfigService.userExists(tenantVo.getUser(), tenantVo.getCreator(), null)) { + if (tenantConfigService.isExist(tenantVo.getUser(), tenantVo.getCreator())) { throw new ConfigurationException("User-creator is existed"); } parameterVerification(tenantVo); @@ -225,8 +225,7 @@ public Message queryTenantList( public Message checkUserCreator( HttpServletRequest req, @RequestParam(value = "user", required = false) String user, - @RequestParam(value = "creator", required = false) String creator, - @RequestParam(value = "tenantValue", required = false) String tenantValue) { + @RequestParam(value = "creator", required = false) String creator) { Boolean result = false; try { // Parameter verification @@ -236,14 +235,11 @@ public Message checkUserCreator( if (StringUtils.isBlank(user)) { throw new ConfigurationException("User Name can't be empty "); } - 
if (creator.equals("*")) { - throw new ConfigurationException("Application Name can't be '*' "); - } String userName = ModuleUserUtils.getOperationUser(req, "checkUserCreator"); if (!Configuration.isAdmin(userName)) { return Message.error("Failed to check-user-creator,msg: only administrators can configure"); } - result = tenantConfigService.userExists(user, creator, tenantValue); + result = tenantConfigService.isExist(user, creator); } catch (ConfigurationException e) { return Message.error("Failed to check-user-creator,msg:" + e.getMessage()); } @@ -267,5 +263,8 @@ private void parameterVerification(TenantVo tenantVo) throws ConfigurationExcept if (StringUtils.isBlank(tenantVo.getTenantValue())) { throw new ConfigurationException("Tenant tag can't be empty "); } + if (tenantVo.getCreator().equals("*") && tenantVo.getUser().equals("*")) { + throw new ConfigurationException("User && Creator cannot be both *"); + } } } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/result/ExecutionResult.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/AcrossClusterRuleService.java similarity index 55% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/result/ExecutionResult.java rename to linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/AcrossClusterRuleService.java index 3d5cdef7e3..30588cb1ac 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/result/ExecutionResult.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/AcrossClusterRuleService.java @@ -15,23 +15,23 @@ * limitations under the License. 
*/ -package org.apache.linkis.cli.common.entity.result; +package org.apache.linkis.configuration.service; -import org.apache.linkis.cli.common.entity.job.Job; +import org.apache.linkis.configuration.entity.AcrossClusterRule; import java.util.Map; -public interface ExecutionResult { - Map getJobs(); +public interface AcrossClusterRuleService { - ExecutionStatus getExecutionStatus(); + void deleteAcrossClusterRule(String creator, String user) throws Exception; - void setExecutionStatus(ExecutionStatus executionStatus); + void updateAcrossClusterRule(AcrossClusterRule acrossClusterRule) throws Exception; - Exception getException(); // TODO: put exception during execution in here and do not interrupt - // execution + void insertAcrossClusterRule(AcrossClusterRule acrossClusterRule) throws Exception; - void setException( - Exception exception); // TODO: put exception during execution in here and do not interrupt - // execution + Map queryAcrossClusterRuleList( + String creator, String user, String clusterName, Integer pageNow, Integer pageSize) + throws Exception; + + void validAcrossClusterRule(Long id, String isValid, String updateBy) throws Exception; } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/ConfigKeyService.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/ConfigKeyService.java index 665f359483..758ac9e91d 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/ConfigKeyService.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/ConfigKeyService.java @@ -17,7 +17,9 @@ package org.apache.linkis.configuration.service; +import org.apache.linkis.configuration.entity.ConfigKey; import org.apache.linkis.configuration.entity.ConfigKeyValue; +import org.apache.linkis.configuration.entity.ConfigUserValue; import 
org.apache.linkis.configuration.entity.ConfigValue; import org.apache.linkis.configuration.exception.ConfigurationException; import org.apache.linkis.manager.label.entity.Label; @@ -32,6 +34,19 @@ ConfigValue saveConfigValue(ConfigKeyValue configKeyValue, List> labelL List getConfigValue(String configKey, List> labelList) throws ConfigurationException; + List getConfigKeyList(String engineType) throws ConfigurationException; + List deleteConfigValue(String configKey, List> labelList) throws ConfigurationException; + + List getConfigBykey(String engineType, String key, String language); + + void deleteConfigById(Integer id); + + ConfigKey saveConfigKey(ConfigKey configKey); + + List getUserConfigValue( + String engineType, String key, String creator, String user); + + void updateConfigKey(ConfigKey configKey); } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/TemplateConfigKeyService.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/TemplateConfigKeyService.java new file mode 100644 index 0000000000..bde686c6d0 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/TemplateConfigKeyService.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.configuration.service; + +import org.apache.linkis.configuration.entity.ConfigKeyLimitVo; +import org.apache.linkis.configuration.exception.ConfigurationException; +import org.apache.linkis.governance.common.protocol.conf.TemplateConfRequest; +import org.apache.linkis.governance.common.protocol.conf.TemplateConfResponse; + +import java.util.List; +import java.util.Map; + +public interface TemplateConfigKeyService { + + Boolean updateKeyMapping( + String templateUid, + String templateName, + String engineType, + String operator, + List itemList, + Boolean isFullMode) + throws ConfigurationException; + + List queryKeyInfoList(List uuidList) throws ConfigurationException; + + Map apply( + String templateUid, + String application, + String engineType, + String engineVersion, + String operator, + List userList) + throws ConfigurationException; + + TemplateConfResponse queryKeyInfoList(TemplateConfRequest templateConfRequest); +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/TenantConfigService.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/TenantConfigService.java index 3d07ad6762..87b14a9c5e 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/TenantConfigService.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/TenantConfigService.java @@ -33,7 +33,7 @@ Map 
queryTenantList( void createTenant(TenantVo tenantVo) throws ConfigurationException; - Boolean userExists(String user, String creator, String tenantValue) throws ConfigurationException; + Boolean isExist(String user, String creator) throws ConfigurationException; TenantVo queryTenant(String user, String creator); } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/AcrossClusterRuleServiceImpl.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/AcrossClusterRuleServiceImpl.java new file mode 100644 index 0000000000..377c27fd72 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/AcrossClusterRuleServiceImpl.java @@ -0,0 +1,115 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.configuration.service.impl; + +import org.apache.linkis.configuration.dao.AcrossClusterRuleMapper; +import org.apache.linkis.configuration.entity.AcrossClusterRule; +import org.apache.linkis.configuration.service.AcrossClusterRuleService; +import org.apache.linkis.governance.common.constant.job.JobRequestConstants; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import java.util.*; + +import com.github.pagehelper.PageHelper; +import com.github.pagehelper.PageInfo; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Service +public class AcrossClusterRuleServiceImpl implements AcrossClusterRuleService { + + private final Logger logger = LoggerFactory.getLogger(this.getClass()); + @Autowired private AcrossClusterRuleMapper ruleMapper; + + @Override + public void deleteAcrossClusterRule(String creator, String user) throws Exception { + ruleMapper.deleteAcrossClusterRule(creator, user); + logger.info("delete acrossClusterRule success"); + return; + } + + @Override + public void updateAcrossClusterRule(AcrossClusterRule newRule) throws Exception { + AcrossClusterRule beforeRule = ruleMapper.getAcrossClusterRule(newRule.getId()); + if (beforeRule == null) { + logger.info("acrossClusterRule not exit"); + throw new Exception("acrossClusterRule not exit"); + } + + Date time = new Date(); + newRule.setCreateBy(beforeRule.getCreateBy()); + newRule.setCreateTime(beforeRule.getCreateTime()); + newRule.setUpdateTime(time); + + ruleMapper.updateAcrossClusterRule(newRule); + logger.info("update acrossClusterRule success"); + return; + } + + @Override + public void insertAcrossClusterRule(AcrossClusterRule acrossClusterRule) throws Exception { + Date time = new Date(); + acrossClusterRule.setCreateTime(time); + acrossClusterRule.setUpdateTime(time); + ruleMapper.insertAcrossClusterRule(acrossClusterRule); + logger.info("insert acrossClusterRule success"); + return; + } 
+ + @Override + public Map queryAcrossClusterRuleList( + String creator, String user, String clusterName, Integer pageNow, Integer pageSize) { + Map result = new HashMap<>(2); + List acrossClusterRules = null; + if (Objects.isNull(pageNow)) { + pageNow = 1; + } + if (Objects.isNull(pageSize)) { + pageSize = 20; + } + PageHelper.startPage(pageNow, pageSize); + + try { + acrossClusterRules = ruleMapper.queryAcrossClusterRuleList(user, creator, clusterName); + } finally { + PageHelper.clearPage(); + } + PageInfo pageInfo = new PageInfo<>(acrossClusterRules); + result.put("acrossClusterRuleList", acrossClusterRules); + result.put(JobRequestConstants.TOTAL_PAGE(), pageInfo.getTotal()); + return result; + } + + @Override + public void validAcrossClusterRule(Long id, String isValid, String updateBy) throws Exception { + AcrossClusterRule acrossClusterRule = ruleMapper.getAcrossClusterRule(id); + if (acrossClusterRule == null) { + logger.info("acrossClusterRule not exit"); + throw new Exception("acrossClusterRule not exit"); + } + acrossClusterRule.setIsValid(isValid); + acrossClusterRule.setUpdateBy(updateBy); + acrossClusterRule.setUpdateTime(new Date()); + logger.info("delete acrossClusterRule success"); + ruleMapper.validAcrossClusterRule(acrossClusterRule); + logger.info("valid acrossClusterRule success"); + return; + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/ConfigKeyServiceImpl.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/ConfigKeyServiceImpl.java index 6811b5e7e2..612c6c2599 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/ConfigKeyServiceImpl.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/ConfigKeyServiceImpl.java @@ -19,10 +19,7 @@ import 
org.apache.linkis.configuration.dao.ConfigMapper; import org.apache.linkis.configuration.dao.LabelMapper; -import org.apache.linkis.configuration.entity.ConfigKey; -import org.apache.linkis.configuration.entity.ConfigKeyValue; -import org.apache.linkis.configuration.entity.ConfigLabel; -import org.apache.linkis.configuration.entity.ConfigValue; +import org.apache.linkis.configuration.entity.*; import org.apache.linkis.configuration.exception.ConfigurationException; import org.apache.linkis.configuration.service.ConfigKeyService; import org.apache.linkis.configuration.util.LabelEntityParser; @@ -163,6 +160,11 @@ public List getConfigValue(String key, List> labelList) return configValues; } + @Override + public List getConfigKeyList(String engineType) throws ConfigurationException { + return configMapper.selectKeyByEngineType(engineType); + } + @Override public List deleteConfigValue(String key, List> labelList) throws ConfigurationException { @@ -174,4 +176,37 @@ public List deleteConfigValue(String key, List> labelList) logger.info("succeed to remove key: {} by label:{} ", key, combinedLabel.getStringValue()); return configValues; } + + @Override + public List getConfigBykey(String engineType, String key, String language) { + List configkeyList; + if ("en".equals(language)) { + configkeyList = configMapper.getConfigEnBykey(engineType, key); + } else { + configkeyList = configMapper.getConfigBykey(engineType, key); + } + return configkeyList; + } + + @Override + public void deleteConfigById(Integer id) { + configMapper.deleteConfigKey(id); + } + + @Override + public ConfigKey saveConfigKey(ConfigKey configKey) { + configMapper.insertKeyByBase(configKey); + return null; + } + + @Override + public List getUserConfigValue( + String engineType, String key, String creator, String user) { + return configMapper.getUserConfigValue(key, user, creator, engineType); + } + + @Override + public void updateConfigKey(ConfigKey configKey) { + 
configMapper.updateConfigKey(configKey); + } } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/TemplateConfigKeyServiceImpl.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/TemplateConfigKeyServiceImpl.java new file mode 100644 index 0000000000..08dc39812f --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/TemplateConfigKeyServiceImpl.java @@ -0,0 +1,455 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.configuration.service.impl; + +import org.apache.linkis.configuration.dao.ConfigKeyLimitForUserMapper; +import org.apache.linkis.configuration.dao.ConfigMapper; +import org.apache.linkis.configuration.dao.LabelMapper; +import org.apache.linkis.configuration.dao.TemplateConfigKeyMapper; +import org.apache.linkis.configuration.entity.*; +import org.apache.linkis.configuration.enumeration.BoundaryTypeEnum; +import org.apache.linkis.configuration.exception.ConfigurationException; +import org.apache.linkis.configuration.service.ConfigurationService; +import org.apache.linkis.configuration.service.TemplateConfigKeyService; +import org.apache.linkis.configuration.util.LabelEntityParser; +import org.apache.linkis.configuration.validate.ValidatorManager; +import org.apache.linkis.governance.common.entity.TemplateConfKey; +import org.apache.linkis.governance.common.protocol.conf.TemplateConfRequest; +import org.apache.linkis.governance.common.protocol.conf.TemplateConfResponse; +import org.apache.linkis.manager.label.entity.CombinedLabel; +import org.apache.linkis.rpc.message.annotation.Receiver; + +import org.apache.commons.lang3.StringUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.TransactionStatus; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.transaction.support.DefaultTransactionDefinition; + +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Service +public class TemplateConfigKeyServiceImpl implements TemplateConfigKeyService { + + private static final Logger logger = 
LoggerFactory.getLogger(TemplateConfigKeyServiceImpl.class); + + @Autowired private ConfigMapper configMapper; + + @Autowired private LabelMapper labelMapper; + + @Autowired private TemplateConfigKeyMapper templateConfigKeyMapper; + + @Autowired private ConfigurationService configurationService; + + @Autowired private ValidatorManager validatorManager; + + @Autowired private ConfigKeyLimitForUserMapper configKeyLimitForUserMapper; + + @Autowired private PlatformTransactionManager platformTransactionManager; + + @Override + @Transactional + public Boolean updateKeyMapping( + String templateUid, + String templateName, + String engineType, + String operator, + List itemList, + Boolean isFullMode) + throws ConfigurationException { + + // Query the corresponding data and check the validity of the data(查询对应的数据 并做数据合法性检查) + List keyList = itemList.stream().map(e -> e.getKey()).collect(Collectors.toList()); + List configKeyList = + configMapper.selectKeyByEngineTypeAndKeyList(engineType, keyList); + // List of key ids to be updated(待更新的key id 列表) + List keyIdList = configKeyList.stream().map(e -> e.getId()).collect(Collectors.toList()); + if (configKeyList.size() != itemList.size()) { + List dbKeyList = + configKeyList.stream().map(e -> e.getKey()).collect(Collectors.toList()); + String msg = + MessageFormat.format( + "The num of configuration item data from the DB is inconsistent with input(从DB中获取到的配置数据条数不一致) :" + + "engineType:{0}, input keys:{1}, db keys:{2}", + engineType, String.join(",", keyList), String.join(",", dbKeyList)); + throw new ConfigurationException(msg); + } + + List toUpdateOrInsertList = new ArrayList<>(); + + // map k:v---> key:ConfigKey + Map configKeyMap = + configKeyList.stream().collect(Collectors.toMap(ConfigKey::getKey, item -> item)); + for (ConfigKeyLimitVo item : itemList) { + + String key = item.getKey(); + ConfigKey temp = configKeyMap.get(item.getKey()); + String validateType = temp.getValidateType(); + String validateRange = 
temp.getValidateRange(); + String configValue = item.getConfigValue(); + String maxValue = item.getMaxValue(); + + if (StringUtils.isNotEmpty(configValue) + && !validatorManager + .getOrCreateValidator(validateType) + .validate(configValue, validateRange)) { + String msg = + MessageFormat.format( + "Parameter configValue verification failed(参数configValue校验失败):" + + "key:{0}, ValidateType:{1}, ValidateRange:{2},ConfigValue:{3}", + key, validateType, validateRange, configValue); + throw new ConfigurationException(msg); + } + + if (StringUtils.isNotEmpty(maxValue) + && BoundaryTypeEnum.WITH_BOTH.getId().equals(temp.getBoundaryType())) { + if (!validatorManager + .getOrCreateValidator(validateType) + .validate(maxValue, validateRange)) { + String msg = + MessageFormat.format( + "Parameter maxValue verification failed(参数maxValue校验失败):" + + "key:{0}, ValidateType:{1}, ValidateRange:{2}, maxValue:{3}", + key, validateType, validateRange, maxValue); + throw new ConfigurationException(msg); + } + + try { + Integer maxVal = Integer.valueOf(maxValue.replaceAll("[^0-9]", "")); + Integer configVal = Integer.valueOf(configValue.replaceAll("[^0-9]", "")); + if (configVal > maxVal) { + String msg = + MessageFormat.format( + "Parameter key:{0},config value:{1} verification failed, " + + "exceeds the specified max value: {2}:(参数校验失败,超过指定的最大值):", + key, configVal, maxVal); + throw new ConfigurationException(msg); + } + } catch (Exception exception) { + if (exception instanceof ConfigurationException) { + throw exception; + } else { + logger.warn( + "Failed to check special limit setting for key:" + + key + + ",config value:" + + configValue); + } + } + } + ; + + Long keyId = temp.getId(); + + TemplateConfigKey templateConfigKey = new TemplateConfigKey(); + templateConfigKey.setTemplateName(templateName); + templateConfigKey.setTemplateUuid(templateUid); + templateConfigKey.setKeyId(keyId); + templateConfigKey.setConfigValue(configValue); + templateConfigKey.setMaxValue(maxValue); + 
templateConfigKey.setCreateBy(operator); + templateConfigKey.setUpdateBy(operator); + toUpdateOrInsertList.add(templateConfigKey); + } + // Update data according to different mode + if (isFullMode) { + // The data previously in the database needs to be removed + List oldList = + templateConfigKeyMapper.selectListByTemplateUuid(templateUid); + List needToRemoveList = + oldList.stream() + .filter( + item -> { + return !keyIdList.contains(item.getKeyId()); + }) + .map(e -> e.getKeyId()) + .collect(Collectors.toList()); + if (needToRemoveList.size() > 0) { + logger.info( + "Try to remove old data:[" + needToRemoveList + "] for templateUid:" + templateUid); + templateConfigKeyMapper.deleteByTemplateUuidAndKeyIdList(templateUid, needToRemoveList); + } + } + + if (toUpdateOrInsertList.size() == 0) { + String msg = "No key data to update, Please check if the keys are correct"; + throw new ConfigurationException(msg); + } + templateConfigKeyMapper.batchInsertOrUpdateList(toUpdateOrInsertList); + + return true; + } + + @Override + public List queryKeyInfoList(List uuidList) throws ConfigurationException { + List result = new ArrayList<>(); + + List templateConfigKeyList = + templateConfigKeyMapper.selectListByTemplateUuidList(uuidList); + + Map> templateConfigKeyListGroupByUuid = + templateConfigKeyList.stream() + .collect(Collectors.groupingBy(TemplateConfigKey::getTemplateUuid)); + + List keyIdList = + templateConfigKeyList.stream() + .map(e -> e.getKeyId()) + .distinct() + .collect(Collectors.toList()); + + if (keyIdList.size() == 0) { + String msg = "can not get any config key info from db, Please check if the keys are correct"; + throw new ConfigurationException(msg); + } + List configKeyList = configMapper.selectKeyByKeyIdList(keyIdList); + // map k:v---> keyId:ConfigKey + Map configKeyMap = + configKeyList.stream().collect(Collectors.toMap(ConfigKey::getId, item -> item)); + + for (String uuid : templateConfigKeyListGroupByUuid.keySet()) { + Map item = new HashMap(); 
+ List keys = new ArrayList<>(); + item.put("templateUid", uuid); + + List group = templateConfigKeyListGroupByUuid.get(uuid); + for (TemplateConfigKey templateConfigKey : group) { + Map temp = new HashMap(); + + temp.put("configValue", templateConfigKey.getConfigValue()); + temp.put("maxValue", templateConfigKey.getMaxValue()); + temp.put("createBy", templateConfigKey.getCreateBy()); + temp.put("createTime", templateConfigKey.getCreateTime()); + temp.put("updateBy", templateConfigKey.getUpdateBy()); + temp.put("updateTime", templateConfigKey.getUpdateTime()); + temp.put("keyId", templateConfigKey.getKeyId()); + + ConfigKey info = configKeyMap.get(templateConfigKey.getKeyId()); + if (info != null) { + temp.put("key", info.getKey()); + temp.put("name", info.getName()); + temp.put("description", info.getDescription()); + temp.put("engineType", info.getEngineType()); + temp.put("validateType", info.getValidateType()); + temp.put("validateRange", info.getValidateRange()); + temp.put("boundaryType", info.getBoundaryType()); + temp.put("defaultValue", info.getDefaultValue()); + // for front-end to judge whether input is required + if (StringUtils.isNotEmpty(info.getDefaultValue())) { + temp.put("require", "true"); + } else { + temp.put("require", "false"); + } + } + + keys.add(temp); + } + item.put("itemList", keys); + result.add(item); + } + return result; + } + + @Override + public Map apply( + String templateUid, + String application, + String engineType, + String engineVersion, + String operator, + List userList) + throws ConfigurationException { + List successList = new ArrayList<>(); + List errorList = new ArrayList<>(); + + // get the associated config itsm list + List templateUuidList = new ArrayList<>(); + templateUuidList.add(templateUid); + List templateConfigKeyList = + templateConfigKeyMapper.selectListByTemplateUuidList(templateUuidList); + if (templateConfigKeyList.size() == 0) { + String msg = + MessageFormat.format( + "The template configuration is 
empty. Please check the template associated configuration information in the database table" + + "(模板关联的配置为空,请检查数据库表中关于模板id:{0} 关联配置项是否完整)", + templateUid); + throw new ConfigurationException(msg); + } + // check input engineType is same as template key engineType + List keyIdList = + templateConfigKeyList.stream() + .map(e -> e.getKeyId()) + .distinct() + .collect(Collectors.toList()); + + if (keyIdList.size() == 0) { + String msg = "can not get any config key info from db, Please check if the keys are correct"; + throw new ConfigurationException(msg); + } + List configKeyList = configMapper.selectKeyByKeyIdList(keyIdList); + // map k:v---> keyId:ConfigKey + Set configKeyEngineTypeSet = + configKeyList.stream().map(ConfigKey::getEngineType).collect(Collectors.toSet()); + + if (configKeyEngineTypeSet == null || configKeyEngineTypeSet.size() == 0) { + String msg = + MessageFormat.format( + "Unable to get configuration parameter information associated with template id:{0}, please check whether the parameters are correct" + + "(无法获取模板:{0} 关联的配置参数信息,请检查参数是否正确)", + templateUid); + throw new ConfigurationException(msg); + } + + if (configKeyEngineTypeSet.size() != 1 || !configKeyEngineTypeSet.contains(engineType)) { + String msg = + MessageFormat.format( + "The engineType:{0} associated with the template:{1} does not match the input engineType:{2}, please check whether the parameters are correct" + + "(模板关联的引擎类型:{0} 和下发的引擎类型:{2} 不匹配,请检查参数是否正确)", + String.join(",", configKeyEngineTypeSet), templateUid, engineType); + throw new ConfigurationException(msg); + } + for (String user : userList) { + // try to create combined_userCreator_engineType label for user + Map res = new HashMap(); + res.put("user", user); + try { + CombinedLabel combinedLabel = + configurationService.generateCombinedLabel( + engineType, engineVersion, user, application); + String conbinedLabelKey = combinedLabel.getLabelKey(); + String conbinedLabelStringValue = combinedLabel.getStringValue(); + // 
check lable is ok + + ConfigLabel configLabel = + labelMapper.getLabelByKeyValue(conbinedLabelKey, conbinedLabelStringValue); + if (null == configLabel || configLabel.getId() < 0) { + configLabel = LabelEntityParser.parseToConfigLabel(combinedLabel); + labelMapper.insertLabel(configLabel); + logger.info("succeed to create label: {}", configLabel.getStringValue()); + } + + // batch update config value + List configValues = new ArrayList<>(); + + List configKeyLimitForUsers = new ArrayList<>(); + + for (TemplateConfigKey templateConfigKey : templateConfigKeyList) { + Long keyId = templateConfigKey.getKeyId(); + String uuid = templateConfigKey.getTemplateUuid(); + String confVal = templateConfigKey.getConfigValue(); + String maxVal = templateConfigKey.getMaxValue(); + + ConfigValue configValue = new ConfigValue(); + configValue.setConfigKeyId(keyId); + configValue.setConfigValue(confVal); + configValue.setConfigLabelId(configLabel.getId()); + configValues.add(configValue); + + ConfigKeyLimitForUser configKeyLimitForUser = new ConfigKeyLimitForUser(); + configKeyLimitForUser.setUserName(user); + configKeyLimitForUser.setCombinedLabelValue(configLabel.getStringValue()); + configKeyLimitForUser.setKeyId(keyId); + configKeyLimitForUser.setConfigValue(confVal); + configKeyLimitForUser.setMaxValue(maxVal); + configKeyLimitForUser.setLatestUpdateTemplateUuid(uuid); + configKeyLimitForUser.setCreateBy(operator); + configKeyLimitForUser.setUpdateBy(operator); + configKeyLimitForUsers.add(configKeyLimitForUser); + } + + if (configValues.size() == 0) { + res.put("msg", "can not get any right key form the db"); + errorList.add(res); + } else { + + DefaultTransactionDefinition transactionDefinition = new DefaultTransactionDefinition(); + TransactionStatus status = + platformTransactionManager.getTransaction(transactionDefinition); + try { + configMapper.batchInsertOrUpdateValueList(configValues); + // batch update user ConfigKeyLimitForUserMapper + 
configKeyLimitForUserMapper.batchInsertOrUpdateList(configKeyLimitForUsers); + + platformTransactionManager.commit(status); // commit transaction if everything's fine + } catch (Exception ex) { + platformTransactionManager.rollback( + status); // rollback transaction if any error occurred + throw ex; + } + successList.add(res); + } + + } catch (Exception e) { + logger.warn("try to update configurations for user:" + user + " with error", e); + res.put("msg", e.getMessage()); + errorList.add(res); + } + } + + Map result = new HashMap<>(); + + Map successResult = new HashMap<>(); + Map errorResult = new HashMap<>(); + + successResult.put("num", successList.size()); + successResult.put("infoList", successList); + + errorResult.put("num", errorList.size()); + errorResult.put("infoList", errorList); + + result.put("success", successResult); + result.put("error", errorResult); + return result; + } + + @Receiver + @Override + public TemplateConfResponse queryKeyInfoList(TemplateConfRequest templateConfRequest) { + TemplateConfResponse result = new TemplateConfResponse(); + String templateUid = templateConfRequest.getTemplateUuid(); + if (StringUtils.isBlank(templateUid)) { + return result; + } + List voList = + templateConfigKeyMapper.selectInfoListByTemplateUuid(templateUid); + + List data = new ArrayList<>(); + if (voList != null) { + for (TemplateConfigKeyVO temp : voList) { + TemplateConfKey item = new TemplateConfKey(); + item.setTemplateUuid(temp.getTemplateUuid()); + item.setKey(temp.getKey()); + item.setTemplateName(temp.getTemplateName()); + item.setConfigValue(temp.getConfigValue()); + data.add(item); + } + } + result.setList(data); + return result; + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/TenantConfigServiceImpl.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/TenantConfigServiceImpl.java index 
8f1cd7f6ea..df64521ad4 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/TenantConfigServiceImpl.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/TenantConfigServiceImpl.java @@ -105,7 +105,6 @@ public void updateTenant(TenantVo tenantVo) throws ConfigurationException { } dataProcessing(tenantVo); TenantVo tenantVoLowerCase = toLowerCase(tenantVo); - tenantVoLowerCase.setUpdateTime(new Date()); logger.info("updateTenant : {}", tenantVoLowerCase); userTenantMapper.updateTenant(tenantVoLowerCase); } @@ -119,24 +118,24 @@ public void updateTenant(TenantVo tenantVo) throws ConfigurationException { public void createTenant(TenantVo tenantVo) throws ConfigurationException { dataProcessing(tenantVo); TenantVo tenantVoLowerCase = toLowerCase(tenantVo); - tenantVoLowerCase.setUpdateTime(new Date()); tenantVoLowerCase.setCreateTime(new Date()); - logger.info("updateTenant : {}", tenantVoLowerCase); + logger.info("createTenant : {}", tenantVoLowerCase); userTenantMapper.createTenant(tenantVo); } private void dataProcessing(TenantVo tenantVo) throws ConfigurationException { AtomicReference tenantResult = new AtomicReference<>(false); // Obtain the tenant information of the ECM list - Map resultmap = null; + Map ecmListResult = null; try { - resultmap = HttpsUtil.sendHttp(null, null); - logger.info("ResourceMonitor response {}:", resultmap); + ecmListResult = HttpsUtil.sendHttp(null, null); + logger.info("Request ecm list response {}:", ecmListResult); } catch (IOException e) { logger.warn("failed to get ecmResource data"); } - Map>> data = MapUtils.getMap(resultmap, "data"); + Map>> data = MapUtils.getMap(ecmListResult, "data"); List> emNodeVoList = data.get("EMs"); + // Compare ECM list tenant labels for task emNodeVoList.forEach( ecmInfo -> { List> labels = (List>) ecmInfo.get("labels"); @@ -145,7 +144,7 @@ private void 
dataProcessing(TenantVo tenantVo) throws ConfigurationException { .forEach( map -> { String tenant = map.get("tenant").toString().toLowerCase(); - if (tenant.equals(tenantVo.getTenantValue())) { + if (tenant.equals(tenantVo.getTenantValue().toLowerCase())) { tenantResult.set(true); } }); @@ -153,15 +152,18 @@ private void dataProcessing(TenantVo tenantVo) throws ConfigurationException { // Compare the value of ecm tenant if (!tenantResult.get()) throw new ConfigurationException("The ECM with the corresponding label was not found"); - // The beginning of tenantValue needs to contain creator - String creator = tenantVo.getCreator().toLowerCase(); - String tenantValue = tenantVo.getTenantValue().toLowerCase().split("_")[0]; - if (!creator.equals(tenantValue)) - throw new ConfigurationException("tenantValue should contain creator first"); + if (!tenantVo.getCreator().equals("*")) { + // The beginning of tenantValue needs to contain creator + String creator = tenantVo.getCreator().toLowerCase(); + String[] tenantArray = tenantVo.getTenantValue().toLowerCase().split("_"); + if (tenantArray.length > 1 && !creator.equals(tenantArray[0])) { + throw new ConfigurationException("tenantValue should contain creator first"); + } + } } @Override - public Boolean userExists(String user, String creator, String tenantValue) { + public Boolean isExist(String user, String creator) { boolean result = true; Map resultMap = queryTenantList(user.toLowerCase(), creator.toLowerCase(), null, 1, 20); @@ -180,6 +182,7 @@ public TenantVo toLowerCase(TenantVo tenantVo) { tenantVo.setTenantValue(tenantVo.getTenantValue().toLowerCase()); tenantVo.setCreator(tenantVo.getCreator().toLowerCase()); tenantVo.setUser(tenantVo.getUser().toLowerCase()); + tenantVo.setUpdateTime(new Date()); return tenantVo; } } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/util/CommonUtils.java 
b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/util/CommonUtils.java index ed80f09a0a..12e049595a 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/util/CommonUtils.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/util/CommonUtils.java @@ -17,6 +17,14 @@ package org.apache.linkis.configuration.util; +import org.apache.linkis.configuration.conf.AcrossClusterConfiguration; + +import java.util.HashMap; +import java.util.Map; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; + public class CommonUtils { public static boolean ipCheck(String str) { if (str != null && !str.isEmpty()) { @@ -28,4 +36,32 @@ public static boolean ipCheck(String str) { } return false; } + + public static String ruleMap2String( + String startTime, + String endTime, + String CPUThreshold, + String MemoryThreshold, + String CPUPercentageThreshold, + String MemoryPercentageThreshold) + throws JsonProcessingException { + Map queueRuleMap = new HashMap<>(); + Map timeRuleMap = new HashMap<>(); + Map thresholdRuleMap = new HashMap<>(); + Map ruleMap = new HashMap<>(); + queueRuleMap.put("suffix", AcrossClusterConfiguration.ACROSS_CLUSTER_QUEUE_SUFFIX()); + timeRuleMap.put("startTime", startTime); + timeRuleMap.put("endTime", endTime); + thresholdRuleMap.put("CPUThreshold", CPUThreshold); + thresholdRuleMap.put("MemoryThreshold", MemoryThreshold); + thresholdRuleMap.put("CPUPercentageThreshold", CPUPercentageThreshold); + thresholdRuleMap.put("MemoryPercentageThreshold", MemoryPercentageThreshold); + ruleMap.put("queueRule", queueRuleMap); + ruleMap.put("timeRule", timeRuleMap); + ruleMap.put("thresholdRule", thresholdRuleMap); + ObjectMapper map2Json = new ObjectMapper(); + String rules = map2Json.writeValueAsString(ruleMap); + + return rules; + } } diff --git 
a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/AcrossClusterRuleMapper.xml b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/AcrossClusterRuleMapper.xml new file mode 100644 index 0000000000..6997963313 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/AcrossClusterRuleMapper.xml @@ -0,0 +1,139 @@ + + + + + + + + + + + + + + + + + + + + + + id,cluster_name,creator,user,create_time,create_by,update_time,update_by,rules,is_valid + + + + cluster_name,creator,user,create_time,create_by,update_time,update_by,rules,is_valid + + + + + + + + + INSERT INTO + linkis_cg_lm_across_cluster_rule () + VALUES + (#{acrossClusterRule.clusterName},#{acrossClusterRule.creator}, #{acrossClusterRule.user}, + + + #{acrossClusterRule.createTime} + + + now() + + , + #{acrossClusterRule.createBy}, + + + #{acrossClusterRule.updateTime} + + + now() + + , + #{acrossClusterRule.updateBy},#{acrossClusterRule.rules}, #{acrossClusterRule.isValid}) + + + + DELETE + FROM + `linkis_cg_lm_across_cluster_rule` + WHERE + creator = #{creator} AND user = #{user} + + + + UPDATE + `linkis_cg_lm_across_cluster_rule` + SET + cluster_name = #{acrossClusterRule.clusterName}, creator = #{acrossClusterRule.creator}, + user=#{acrossClusterRule.user}, create_time=#{acrossClusterRule.createTime}, + create_By=#{acrossClusterRule.createBy}, + + + update_time=#{acrossClusterRule.updateTime} + + + update_time = now() + + , + update_By=#{acrossClusterRule.updateBy}, rules=#{acrossClusterRule.rules}, + is_valid=#{acrossClusterRule.isValid} + WHERE + id = #{acrossClusterRule.id} + + + + UPDATE + `linkis_cg_lm_across_cluster_rule` + SET + + + update_time=#{acrossClusterRule.updateTime} + + + update_time = now() + + , + update_By=#{acrossClusterRule.updateBy}, + is_valid=#{acrossClusterRule.isValid} + WHERE + id = #{acrossClusterRule.id} + + + + \ No newline at end of file diff --git 
a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/ConfigKeyLimitForUserMapper.xml b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/ConfigKeyLimitForUserMapper.xml new file mode 100644 index 0000000000..74d1749105 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/ConfigKeyLimitForUserMapper.xml @@ -0,0 +1,145 @@ + + + + + + + + + + + + + + + + + + + + + id, user_name, combined_label_value, key_id, config_value, max_value, min_value, latest_update_template_uuid, is_valid, + create_by, create_time, update_by, update_time + + + + insert into linkis_ps_configuration_key_limit_for_user ( + id, user_name, combined_label_value, + key_id, config_value, max_value, min_value, + latest_update_template_uuid, + is_valid, create_by, create_time, update_by, + update_time) + values + + ( + #{item.id,jdbcType=BIGINT}, #{item.userName,jdbcType=VARCHAR}, #{item.combinedLabelValue,jdbcType=VARCHAR}, + #{item.keyId,jdbcType=BIGINT}, #{item.configValue,jdbcType=VARCHAR}, #{item.maxValue,jdbcType=VARCHAR}, + #{item.minValue,jdbcType=VARCHAR}, + #{item.latestUpdateTemplateUuid,jdbcType=VARCHAR}, + #{item.isValid,jdbcType=VARCHAR}, #{item.createBy,jdbcType=VARCHAR}, now(), #{item.updateBy,jdbcType=VARCHAR}, + now()) + + + + + update linkis_ps_configuration_key_limit_for_user + set user_name = #{userName,jdbcType=VARCHAR}, + combined_label_value = #{combinedLabelValue,jdbcType=VARCHAR}, + key_id = #{keyId,jdbcType=BIGINT}, + config_value = #{configValue,jdbcType=VARCHAR}, + max_value = #{maxValue,jdbcType=VARCHAR}, + min_value = #{minValue,jdbcType=VARCHAR}, + latest_update_template_uuid = #{latestUpdateTemplateUuid,jdbcType=VARCHAR}, + is_valid = #{isValid,jdbcType=VARCHAR}, + create_by = #{createBy,jdbcType=VARCHAR}, + update_by = #{updateBy,jdbcType=VARCHAR}, + update_time = now() + where id = #{id,jdbcType=BIGINT} + + + + + + insert into 
linkis_ps_configuration_key_limit_for_user (user_name, combined_label_value, + key_id, config_value, max_value, min_value, + latest_update_template_uuid, + create_by, create_time, update_by, + update_time) + values + + ( + #{item.userName,jdbcType=VARCHAR}, #{item.combinedLabelValue,jdbcType=VARCHAR}, + #{item.keyId,jdbcType=BIGINT}, #{item.configValue,jdbcType=VARCHAR}, #{item.maxValue,jdbcType=VARCHAR}, + #{item.minValue,jdbcType=VARCHAR}, #{item.latestUpdateTemplateUuid,jdbcType=VARCHAR}, + #{item.createBy,jdbcType=VARCHAR}, + + + #{item.createTime}, + + + now(), + + + #{item.updateBy,jdbcType=VARCHAR}, + + + #{item.updateTime} + + + now() + + + ) + + on duplicate key update + config_value =values(config_value), + max_value =values(max_value), + latest_update_template_uuid =values(latest_update_template_uuid), + update_by =values(update_by), + update_time= now() + + + + + + + + + diff --git a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/ConfigMapper.xml b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/ConfigMapper.xml index a5c0af0ef5..f0fe9ce520 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/ConfigMapper.xml +++ b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/ConfigMapper.xml @@ -24,7 +24,6 @@ - @@ -32,8 +31,7 @@ - - + @@ -51,8 +49,8 @@ - - + + @@ -78,9 +76,20 @@ + + + + + + + + + + + - `id`, `key`, `description`, `name`, `engine_conn_type`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName` + `id`, `key`, `description`, `name`, `engine_conn_type`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`,`boundary_type` @@ -113,9 +122,19 @@ INSERT INTO linkis_ps_configuration_config_key ( - id, key, description, name, engine_conn_type, default_value, validate_type, validate_range, is_hidden, is_advanced, level, treeName) 
+ `id`, `key`, `description`, + `name`, `engine_conn_type`, `default_value`, + `validate_type`, `validate_range`, `is_hidden`, + `is_advanced`, `level`, `treeName`, + `boundary_type`, `en_name`, `en_treeName`, + `en_description`) VALUES ( - #{id}, #{key}, #{description}, #{name}, #{engineType}, #{defaultValue}, #{validateType}, #{validateRange}, #{isHidden}, #{isAdvanced}, #{level}, #{treeName}) + #{id}, #{key}, #{description}, + #{name}, #{engineType}, #{defaultValue}, + #{validateType}, #{validateRange}, #{isHidden}, + #{isAdvanced}, #{level}, #{treeName}, + #{boundaryType}, #{enName}, #{enTreeName}, + #{enDescription}) + + + + + + + + + + + + + + SELECT * FROM linkis_ps_configuration_config_value WHERE id = #{id} @@ -257,4 +363,118 @@ WHERE id = #{categoryId} + + DELETE FROM linkis_ps_configuration_config_key + WHERE id = #{id} + + + + + + + + + + + + INSERT INTO linkis_ps_configuration_config_key ( + `key`, `description`, `name`, + `engine_conn_type`, `default_value`, `validate_type`, + `validate_range`, `is_hidden`, `is_advanced`, + `level`, `treeName`, `boundary_type`, + `en_name`, `en_treeName`, `en_description` + ) + VALUES ( + #{key}, #{description}, #{name}, + #{engineType}, #{defaultValue}, #{validateType}, + #{validateRange}, #{isHidden}, #{isAdvanced}, + #{level}, #{treeName}, #{boundaryType}, + #{enName}, #{enTreeName}, #{enDescription} + ) + + + + UPDATE linkis_ps_configuration_config_key + + `key` = #{key}, + `name` = #{name}, + `description` = #{description}, + `engine_conn_type` = #{engineType}, + `default_value` = #{defaultValue}, + `validate_type` = #{validateType}, + `validate_range` = #{validateRange}, + `validate_range` = #{validateRange}, + `treeName` = #{treeName}, + `boundary_type` = #{boundaryType}, + `en_name` = #{enName}, + `en_treeName` = #{enTreeName}, + `en_description` = #{enDescription}, + + WHERE id = #{id} + + + + \ No newline at end of file diff --git 
a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/LabelMapper.xml b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/LabelMapper.xml index d4cea8e29a..34783107fb 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/LabelMapper.xml +++ b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/LabelMapper.xml @@ -50,6 +50,16 @@ where label_key = #{labelKey} and label_value = #{stringValue} + + INSERT INTO linkis_cg_manager_label( label_key, label_value,label_feature, label_value_size, update_time, create_time) @@ -57,6 +67,17 @@ #{labelKey}, #{stringValue}, #{feature}, #{labelValueSize}, now(), now()) + + + INSERT IGNORE INTO linkis_cg_manager_label( + label_key, label_value,label_feature, label_value_size, update_time, create_time) + VALUES + + #{item.labelKey}, #{item.stringValue}, #{item.feature}, #{item.labelValueSize}, now(), now() + + + + DELETE FROM linkis_cg_manager_label WHERE id IN diff --git a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/TemplateConfigKeyMapper.xml b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/TemplateConfigKeyMapper.xml new file mode 100644 index 0000000000..fbc02a3f4f --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/TemplateConfigKeyMapper.xml @@ -0,0 +1,141 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id, template_name, template_uuid, key_id, config_value, max_value, min_value, validate_range, + is_valid, create_by, create_time, update_by, update_time + + + + insert into linkis_ps_configuration_template_config_key (id, template_name, template_uuid, + key_id, config_value, max_value, + min_value, validate_range, is_valid, + create_by, create_time, update_by, update_time + ) + values + + ( + #{item.id,jdbcType=BIGINT}, #{item.templateName,jdbcType=VARCHAR}, 
#{item.templateUuid,jdbcType=VARCHAR}, + #{item.keyId,jdbcType=BIGINT}, #{item.configValue,jdbcType=VARCHAR}, #{item.maxValue,jdbcType=VARCHAR}, + #{item.minValue,jdbcType=VARCHAR}, #{item.validateRange,jdbcType=VARCHAR}, #{item.isValid,jdbcType=VARCHAR}, + #{item.createBy,jdbcType=VARCHAR}, now(), #{item.updateBy,jdbcType=VARCHAR}, now() + ) + + + + + + + delete + from linkis_ps_configuration_template_config_key + where template_uuid = #{templateUuid,jdbcType=VARCHAR} + and key_id in + + #{item} + + + + + insert into linkis_ps_configuration_template_config_key (template_name, template_uuid, + key_id, config_value, max_value, + create_by, create_time, update_by, update_time + ) + values + + ( + #{item.templateName,jdbcType=VARCHAR}, #{item.templateUuid,jdbcType=VARCHAR}, + #{item.keyId,jdbcType=BIGINT}, #{item.configValue,jdbcType=VARCHAR}, #{item.maxValue,jdbcType=VARCHAR}, + #{item.createBy,jdbcType=VARCHAR}, + + + #{item.createTime}, + + + now(), + + + #{item.updateBy,jdbcType=VARCHAR}, + + + #{item.updateTime} + + + now() + + + ) + + on duplicate key update + template_name =values(template_name), + config_value =values(config_value), + max_value =values(max_value), + update_by=values(update_by), + update_time= now() + + + + + + + + diff --git a/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/conf/Configuration.scala b/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/conf/Configuration.scala index 1721241ab1..963837adef 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/conf/Configuration.scala +++ b/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/conf/Configuration.scala @@ -43,4 +43,7 @@ object Configuration { val USE_CREATOR_DEFAULE_VALUE = CommonVars.apply("wds.linkis.configuration.use.creator.default.value", true).getValue + val REMOVE_APPLICATION_CACHE = + 
CommonVars.apply("linkis.configuration.remove.application.cache", "IDE").getValue + } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/service/ConfigurationService.scala b/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/service/ConfigurationService.scala index 1a0f714522..3f86697254 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/service/ConfigurationService.scala +++ b/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/service/ConfigurationService.scala @@ -17,9 +17,9 @@ package org.apache.linkis.configuration.service -import org.apache.linkis.common.utils.Logging +import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.configuration.conf.Configuration -import org.apache.linkis.configuration.dao.{ConfigMapper, LabelMapper} +import org.apache.linkis.configuration.dao.{ConfigKeyLimitForUserMapper, ConfigMapper, LabelMapper} import org.apache.linkis.configuration.entity._ import org.apache.linkis.configuration.exception.ConfigurationException import org.apache.linkis.configuration.util.{LabelEntityParser, LabelParameterParser} @@ -42,6 +42,7 @@ import org.springframework.stereotype.Service import org.springframework.transaction.annotation.Transactional import org.springframework.util.CollectionUtils +import java.text.MessageFormat import java.util import scala.collection.JavaConverters._ @@ -57,6 +58,8 @@ class ConfigurationService extends Logging { @Autowired private var validatorManager: ValidatorManager = _ + @Autowired private var configKeyLimitForUserMapper: ConfigKeyLimitForUserMapper = _ + private val combinedLabelBuilder: CombinedLabelBuilder = new CombinedLabelBuilder @Transactional @@ -93,12 +96,6 @@ class ConfigurationService extends Logging { } } - def insertCreator(creator: String): Unit = { - val creatorID: Long = 
configMapper.selectAppIDByAppName(creator) - if (creatorID > 0) configMapper.insertCreator(creator) - else logger.warn(s"creator${creator} exists") - } - def checkAndCreateUserLabel( settings: util.List[ConfigKeyValue], username: String, @@ -178,6 +175,33 @@ class ConfigurationService extends Logging { createList: util.List[ConfigValue], updateList: util.List[ConfigValue] ): Any = { + + val configLabel = labelMapper.getLabelById(setting.getConfigLabelId) + val combinedLabel = combinedLabelBuilder + .buildFromStringValue(configLabel.getLabelKey, configLabel.getStringValue) + .asInstanceOf[CombinedLabel] + val templateConfigKeyVo = + configKeyLimitForUserMapper.selectByLabelAndKeyId(combinedLabel.getStringValue, setting.getId) + if (templateConfigKeyVo != null && StringUtils.isNotBlank(templateConfigKeyVo.getMaxValue)) { + Utils.tryCatch { + val maxValue = Integer.valueOf(templateConfigKeyVo.getMaxValue.replaceAll("[^0-9]", "")) + val configValue = Integer.valueOf(setting.getConfigValue.replaceAll("[^0-9]", "")) + if (configValue > maxValue) { + throw new ConfigurationException( + s"Parameter key:${setting.getKey},config value:${setting.getConfigValue} verification failed,exceeds the specified max value:${templateConfigKeyVo.getMaxValue}:(参数校验失败,超过指定的最大值):" + + s"${setting.getValidateType}--${setting.getValidateRange}" + ) + } + } { case exception: Exception => + if (exception.isInstanceOf[ConfigurationException]) { + throw exception + } else { + logger.warn( + s"Failed to check special limit setting for key:${setting.getKey},config value:${setting.getConfigValue}" + ) + } + } + } paramCheck(setting) if (setting.getIsUserDefined) { val configValue = new ConfigValue @@ -259,6 +283,12 @@ class ConfigurationService extends Logging { combinedLabel.asInstanceOf[CombinedLabelImpl] } + /** + * Priority: configs > defaultConfigs + * @param configs + * @param defaultConfigs + * @return + */ def buildTreeResult( configs: util.List[ConfigKeyValue], defaultConfigs: 
util.List[ConfigKeyValue] = new util.ArrayList[ConfigKeyValue]() @@ -269,9 +299,8 @@ class ConfigurationService extends Logging { defaultConfig.setIsUserDefined(false) configs.asScala.foreach(config => { if (config.getKey != null && config.getKey.equals(defaultConfig.getKey)) { - if (StringUtils.isNotBlank(config.getConfigValue)) { - defaultConfig.setConfigValue(config.getConfigValue) - } + // configValue also needs to be replaced when the value is empty + defaultConfig.setConfigValue(config.getConfigValue) defaultConfig.setConfigLabelId(config.getConfigLabelId) defaultConfig.setValueId(config.getValueId) defaultConfig.setIsUserDefined(true) @@ -380,6 +409,35 @@ class ConfigurationService extends Logging { replaceCreatorToEngine(defaultCreatorConfigs, defaultEngineConfigs) } } + + // add special config limit info + if (defaultEngineConfigs.size() > 0) { + val keyIdList = defaultEngineConfigs.asScala.toStream + .map(e => { + e.getId + }) + .toList + .asJava + val limitList = + configKeyLimitForUserMapper.selectByLabelAndKeyIds(combinedLabel.getStringValue, keyIdList) + defaultEngineConfigs.asScala.foreach(entity => { + val keyId = entity.getId + val res = limitList.asScala.filter(v => v.getKeyId == keyId).toList.asJava + if (res.size() > 0) { + val specialMap = new util.HashMap[String, String]() + val maxValue = res.get(0).getMaxValue + if (StringUtils.isNotBlank(maxValue)) { + specialMap.put("maxValue", maxValue) + entity.setSpecialLimit(specialMap) + } + } + }) + } else { + logger.warn( + s"The configuration is empty. 
Please check the configuration information in the database table(配置为空,请检查数据库表中关于标签${combinedLabel.getStringValue}的配置信息是否完整)" + ) + } + (configs, defaultEngineConfigs) } diff --git a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigKeyLimitForUserMapperTest.java b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigKeyLimitForUserMapperTest.java new file mode 100644 index 0000000000..a7ef5a1c14 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigKeyLimitForUserMapperTest.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.configuration.dao; + +import org.apache.linkis.configuration.entity.ConfigKeyLimitForUser; + +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.List; +import java.util.UUID; + +import org.instancio.Instancio; +import org.instancio.Select; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +class ConfigKeyLimitForUserMapperTest extends BaseDaoTest { + + @Autowired ConfigKeyLimitForUserMapper configKeyLimitForUserMapper; + + String uuid = UUID.randomUUID().toString(); + String name = "for-test"; + + private List initData() { + List list = + Instancio.ofList(ConfigKeyLimitForUser.class) + .generate(Select.field(ConfigKeyLimitForUser::getIsValid), gen -> gen.oneOf("Y", "N")) + .create(); + ConfigKeyLimitForUser configKeyLimitForUser = new ConfigKeyLimitForUser(); + configKeyLimitForUser.setUserName("testuser"); + configKeyLimitForUser.setCombinedLabelValue("IDE-hadoop,spark-2.3.3"); + configKeyLimitForUser.setKeyId(1L); + configKeyLimitForUser.setLatestUpdateTemplateUuid(uuid); + configKeyLimitForUser.setCreateBy("test"); + configKeyLimitForUser.setUpdateBy("test"); + list.add(configKeyLimitForUser); + configKeyLimitForUserMapper.batchInsertList(list); + return list; + } + + @Test + void batchInsertOrUpdateListTest() { + List list = initData(); + list.get(1).setLatestUpdateTemplateUuid("123456"); + int isOk = configKeyLimitForUserMapper.batchInsertOrUpdateList(list); + Assertions.assertTrue(isOk > 1); + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigMapperTest.java b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigMapperTest.java index c636b91359..c5572ceff9 100644 --- a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigMapperTest.java +++ 
b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigMapperTest.java @@ -154,12 +154,6 @@ void testListKeyByStringValue() { // assertEquals(7, configKeyList.size()); } - @Test - void testInsertCreator() { - // mapper方法没有对应的实现类 - // configMapper.insertCreator("tom"); - } - @Test void testGetCategory() { List categoryLabelList = configMapper.getCategory(); @@ -203,6 +197,7 @@ void testUpdateCategory() { void testInsertKey() { ConfigKey configKey = new ConfigKey(); configKey.setKey("wds.linkis.rm.instance.max.max"); + configKey.setBoundaryType(3); configMapper.insertKey(configKey); ConfigKey result = configMapper.selectKeyByKeyID(8L); // assertEquals("wds.linkis.rm.instance.max.max", result.getKey()); diff --git a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/TemplateConfigKeyMapperTest.java b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/TemplateConfigKeyMapperTest.java new file mode 100644 index 0000000000..64b12ba7f4 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/TemplateConfigKeyMapperTest.java @@ -0,0 +1,96 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.configuration.dao; + +import org.apache.linkis.configuration.entity.TemplateConfigKey; + +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; + +import org.instancio.Instancio; +import org.instancio.Select; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +class TemplateConfigKeyMapperTest extends BaseDaoTest { + + @Autowired TemplateConfigKeyMapper templateConfigKeyMapper; + String uuid = UUID.randomUUID().toString(); + String name = "for-test"; + + private List initData() { + List list = + Instancio.ofList(TemplateConfigKey.class) + .size(9) + .generate(Select.field(TemplateConfigKey::getIsValid), gen -> gen.oneOf("Y", "N")) + .create(); + + TemplateConfigKey templateConfigKey = new TemplateConfigKey(); + templateConfigKey.setTemplateName(name); + templateConfigKey.setTemplateUuid(uuid); + templateConfigKey.setKeyId(1L); + templateConfigKey.setConfigValue("3"); + templateConfigKey.setMaxValue("8"); + templateConfigKey.setCreateBy("test"); + templateConfigKey.setUpdateBy("test"); + list.add(templateConfigKey); + templateConfigKeyMapper.batchInsertList(list); + return list; + } + + @Test + void selectListByTemplateUuid() { + initData(); + List res = templateConfigKeyMapper.selectListByTemplateUuid(uuid); + assertEquals(res.size(), 1); + assertEquals(res.get(0).getTemplateName(), name); + } + + @Test + void deleteByTemplateUuidAndKeyIdList() { + List list = initData(); + List KeyIdList = new ArrayList<>(); + KeyIdList.add(1L); + int num = templateConfigKeyMapper.deleteByTemplateUuidAndKeyIdList(uuid, KeyIdList); + assertEquals(num, 1); + } + + @Test + void batchInsertOrUpdateList() { + List list = initData(); + list.get(1).setConfigValue("20"); + 
int isOK = templateConfigKeyMapper.batchInsertOrUpdateList(list); + Assertions.assertTrue(isOK >= 1); + } + + @Test + void selectListByTemplateUuidList() { + List list = initData(); + List templateUuidList = new ArrayList<>(); + templateUuidList.add(uuid); + templateUuidList.add("123456"); + List res = + templateConfigKeyMapper.selectListByTemplateUuidList(templateUuidList); + Assertions.assertTrue(res.size() == 1); + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApiTest.java b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApiTest.java index 77c77d926f..41803098d0 100644 --- a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApiTest.java +++ b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApiTest.java @@ -57,7 +57,7 @@ public void TestAddKeyForEngine() throws Exception { paramsMap.add("engineType", "spark"); paramsMap.add("version", "2.4.3"); paramsMap.add("token", "e8724-e"); - paramsMap.add("keyJson", "{'engineType':'spark','version':'2.4.3'}"); + paramsMap.add("keyJson", "{'engineType':'spark','version':'2.4.3','boundaryType':3}"); String url = "/configuration/addKeyForEngine"; sendUrl(url, paramsMap, "get", null); } diff --git a/linkis-public-enhancements/linkis-configuration/src/test/resources/create.sql b/linkis-public-enhancements/linkis-configuration/src/test/resources/create.sql index 75a3ad195a..b96eb7c763 100644 --- a/linkis-public-enhancements/linkis-configuration/src/test/resources/create.sql +++ b/linkis-public-enhancements/linkis-configuration/src/test/resources/create.sql @@ -48,6 +48,10 @@ CREATE TABLE `linkis_ps_configuration_config_key` `is_advanced` tinyint(1) DEFAULT NULL COMMENT 'Whether it is an advanced 
parameter. If set to 1(true), parameters would be displayed only when user choose to do so', `level` tinyint(1) DEFAULT NULL COMMENT 'Basis for displaying sorting in the front-end. Higher the level is, higher the rank the parameter gets', `treeName` varchar(20) DEFAULT NULL COMMENT 'Reserved field, representing the subdirectory of engineType', + `boundary_type` int(2) NOT NULL COMMENT '0 none/ 1 with mix /2 with max / 3 min and max both', + `en_description` varchar(200) DEFAULT NULL COMMENT 'english description', + `en_name` varchar(100) DEFAULT NULL COMMENT 'english name', + `en_treeName` varchar(100) DEFAULT NULL COMMENT 'english treeName', PRIMARY KEY (`id`) ); @@ -86,4 +90,44 @@ CREATE TABLE `linkis_ps_configuration_category` `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), UNIQUE INDEX (`label_id`) +); + + +DROP TABLE IF EXISTS `linkis_ps_configuration_template_config_key`; +CREATE TABLE `linkis_ps_configuration_template_config_key` ( + `id` int(20) NOT NULL AUTO_INCREMENT, + `template_name` varchar(200) NOT NULL COMMENT '配置模板名称 冗余存储', + `template_uuid` varchar(36) NOT NULL COMMENT 'uuid 第三方侧记录的模板id', + `key_id` int(20) NOT NULL COMMENT 'id of linkis_ps_configuration_config_key', + `config_value` varchar(200) NULL DEFAULT NULL COMMENT '配置值', + `max_value` varchar(50) NULL DEFAULT NULL COMMENT '上限值', + `min_value` varchar(50) NULL DEFAULT NULL COMMENT '下限值(预留)', + `validate_range` varchar(50) NULL DEFAULT NULL COMMENT '校验正则(预留) ', + `is_valid` varchar(2) DEFAULT 'Y' COMMENT '是否有效 预留 Y/N', + `create_by` varchar(50) NOT NULL COMMENT '创建人', + `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', + `update_by` varchar(50) NULL DEFAULT NULL COMMENT '更新人', + `update_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'update time', + PRIMARY KEY (`id`), + UNIQUE INDEX `uniq_tid_kid` (`template_uuid`, `key_id`) +); + + +DROP TABLE IF EXISTS `linkis_ps_configuration_key_limit_for_user`; +CREATE TABLE 
`linkis_ps_configuration_key_limit_for_user` ( + `id` int(20) NOT NULL AUTO_INCREMENT, + `user_name` varchar(50) NOT NULL COMMENT '用户名', + `combined_label_value` varchar(200) NOT NULL COMMENT '组合标签 combined_userCreator_engineType 如 hadoop-IDE,spark-2.4.3', + `key_id` int(20) NOT NULL COMMENT 'id of linkis_ps_configuration_config_key', + `config_value` varchar(200) NULL DEFAULT NULL COMMENT '配置值', + `max_value` varchar(50) NULL DEFAULT NULL COMMENT '上限值', + `min_value` varchar(50) NULL DEFAULT NULL COMMENT '下限值(预留)', + `latest_update_template_uuid` varchar(36) NOT NULL COMMENT 'uuid 第三方侧记录的模板id', + `is_valid` varchar(2) DEFAULT 'Y' COMMENT '是否有效 预留 Y/N', + `create_by` varchar(50) NOT NULL COMMENT '创建人', + `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', + `update_by` varchar(50) NULL DEFAULT NULL COMMENT '更新人', + `update_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'update time', + PRIMARY KEY (`id`), + UNIQUE INDEX `uniq_com_label_kid` (`combined_label_value`, `key_id`) ); \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-configuration/src/test/resources/data.sql b/linkis-public-enhancements/linkis-configuration/src/test/resources/data.sql index 5b79639f12..4137dbbf16 100644 --- a/linkis-public-enhancements/linkis-configuration/src/test/resources/data.sql +++ b/linkis-public-enhancements/linkis-configuration/src/test/resources/data.sql @@ -22,13 +22,13 @@ insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_featur insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-Visualis,*-*', 'OPTIONAL', 2, now(), now()); insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-nodeexecution,*-*', 'OPTIONAL', 2, now(), now()); -INSERT INTO 
`linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue', 'yarn队列名', 'yarn队列名', 'ide', 'None', NULL, '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.instance.max', '取值范围:1-128,单位:个', '队列实例最大个数', '30', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.cores.max', '取值范围:1-500,单位:个', '队列CPU使用上限', '150', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|500)$', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.memory.max', '取值范围:1-1000,单位:G', '队列内存使用上限', '300G', 'Regex', '^([1-9]\\d{0,2}|1000)(G|g)$', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '全局各个引擎内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '全局各个引擎核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1][0-2][0-8])$', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, 
`description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '全局各个引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`,`boundary_type`) VALUES ('wds.linkis.rm.yarnqueue', 'yarn队列名', 'yarn队列名', 'ide', 'None', NULL, '0', '0', '1', '队列资源',0); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`,`boundary_type`) VALUES ('wds.linkis.rm.yarnqueue.instance.max', '取值范围:1-128,单位:个', '队列实例最大个数', '30', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源',3); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`,`boundary_type`) VALUES ('wds.linkis.rm.yarnqueue.cores.max', '取值范围:1-500,单位:个', '队列CPU使用上限', '150', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|500)$', '0', '0', '1', '队列资源',3); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`,`boundary_type`) VALUES ('wds.linkis.rm.yarnqueue.memory.max', '取值范围:1-1000,单位:G', '队列内存使用上限', '300G', 'Regex', '^([1-9]\\d{0,2}|1000)(G|g)$', '0', '0', '1', '队列资源',3); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`,`boundary_type`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '全局各个引擎内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源',3); +INSERT INTO 
`linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`,`boundary_type`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '全局各个引擎核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1][0-2][0-8])$', '0', '0', '1', '队列资源',3); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`,`boundary_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '全局各个引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源',3); insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (1,1); insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (2,1); diff --git a/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/main/java/org/apache/linkis/errorcode/client/handler/LinkisErrorCodeHandler.java b/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/main/java/org/apache/linkis/errorcode/client/handler/LinkisErrorCodeHandler.java index a5c8d638c7..aa444ac55d 100644 --- a/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/main/java/org/apache/linkis/errorcode/client/handler/LinkisErrorCodeHandler.java +++ b/linkis-public-enhancements/linkis-error-code/linkis-error-code-client/src/main/java/org/apache/linkis/errorcode/client/handler/LinkisErrorCodeHandler.java @@ -73,7 +73,7 @@ public static LinkisErrorCodeHandler getInstance() { return linkisErrorCodeHandler; } - private LinkisErrorCodeHandler() {} + public LinkisErrorCodeHandler() {} static { // Initialize our timing thread and other thread pools through the getInstance method. 
diff --git a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-client/src/main/scala/org/apache/linkis/instance/label/client/InstanceLabelClient.scala b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-client/src/main/scala/org/apache/linkis/instance/label/client/InstanceLabelClient.scala index efaf43818c..677898352a 100644 --- a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-client/src/main/scala/org/apache/linkis/instance/label/client/InstanceLabelClient.scala +++ b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-client/src/main/scala/org/apache/linkis/instance/label/client/InstanceLabelClient.scala @@ -62,41 +62,36 @@ class InstanceLabelClient extends Logging { def getLabelFromInstance(serviceInstance: ServiceInstance): util.List[Label[_]] = { val request = new InsLabelQueryRequest(serviceInstance) - Utils.tryAndError { - getSender().ask(request) match { + Utils.tryAndWarn { + val respObj = getSender().ask(request) + respObj match { case resp: InsLabelQueryResponse => val labelList = new util.ArrayList[Label[_]]() resp.getLabelList.asScala.foreach(pair => labelList.add(labelBuilderFactory.createLabel[Label[_]](pair.getKey, pair.getValue)) ) labelList - case o => - logger.error(s"Invalid response ${BDPJettyServerHelper.gson - .toJson(o)} from request : ${BDPJettyServerHelper.gson.toJson(request)}") + case _ => + logger.warn(s"Invalid resp :$respObj from request : $request") new util.ArrayList[Label[_]] } } } def getInstanceFromLabel(labels: util.List[Label[_]]): util.List[ServiceInstance] = { - Utils.tryAndError { + Utils.tryAndWarn { val request = new LabelInsQueryRequest() val labelMap = LabelUtils.labelsToMap(labels) request.setLabels(labelMap.asInstanceOf[util.HashMap[String, Object]]) - Sender.getSender(PUBLIC_SERVICE_APPLICATION_NAME.getValue).ask(request) match { + val respObj = getSender().ask(request) + respObj match { case resp: LabelInsQueryResponse => if (null == 
resp.getInsList || resp.getInsList.isEmpty) { return new util.ArrayList[ServiceInstance]() } - if (resp.getInsList.size() != 1) { - logger.warn( - s"Instance num ${resp.getInsList.size()} with labels ${BDPJettyServerHelper.gson.toJson(labelMap)} is not single one." - ) - } resp.getInsList - case o => - logger.error(s"Invalid resp : ${JsonUtils.jackson - .writeValueAsString(o)} from request : ${BDPJettyServerHelper.gson.toJson(request)}") + case _ => + logger.warn(s"Invalid resp :$respObj from request : $request") new util.ArrayList[ServiceInstance]() } } diff --git a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/service/impl/DefaultInsLabelService.java b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/service/impl/DefaultInsLabelService.java index 5cc3bcc633..3b7aaf4c4b 100644 --- a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/service/impl/DefaultInsLabelService.java +++ b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/service/impl/DefaultInsLabelService.java @@ -140,12 +140,6 @@ public void refreshLabelsToInstance( List> labels, ServiceInstance serviceInstance) throws InstanceErrorException { List insLabels = toInsPersistenceLabels(labels); - // Label candidate to be removed - List labelsCandidateRemoved = - insLabelRelationDao.searchLabelsByInstance(serviceInstance.getInstance()); - if (!labelsCandidateRemoved.isEmpty()) { - labelsCandidateRemoved.removeAll(insLabels); - } LOG.info("Drop relationships related by instance: [" + serviceInstance.getInstance() + "]"); insLabelRelationDao.dropRelationsByInstance(serviceInstance.getInstance()); // Attach labels to instance diff --git 
a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/common/InsLabelRelationMapper.xml b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/common/InsLabelRelationMapper.xml index 2a5d8a3d45..f11c80624d 100644 --- a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/common/InsLabelRelationMapper.xml +++ b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/common/InsLabelRelationMapper.xml @@ -197,7 +197,9 @@ - \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/cache/impl/DefaultQueryCacheManager.java b/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/cache/impl/DefaultQueryCacheManager.java index c83d730b8b..7ff5aeb32d 100644 --- a/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/cache/impl/DefaultQueryCacheManager.java +++ b/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/cache/impl/DefaultQueryCacheManager.java @@ -195,12 +195,7 @@ public void refreshUndoneTask() { Date sDate = DateUtils.addDays(eDate, -1); queryTasks = jobHistoryMapper.searchWithIdOrderAsc( - undoneTaskMinId, - null, - Arrays.asList("Running", "Inited", "Scheduled"), - sDate, - eDate, - null); + sDate, eDate, undoneTaskMinId, Arrays.asList("Running", "Inited", "Scheduled")); } finally { PageHelper.clearPage(); } diff --git a/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/dao/JobHistoryMapper.java b/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/dao/JobHistoryMapper.java index 7bb7656346..c25eee4a2e 100644 --- 
a/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/dao/JobHistoryMapper.java +++ b/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/dao/JobHistoryMapper.java @@ -33,12 +33,10 @@ public interface JobHistoryMapper { void updateJobHistory(JobHistory jobReq); List searchWithIdOrderAsc( - @Param("id") Long id, - @Param("umUser") String username, - @Param("status") List status, @Param("startDate") Date startDate, @Param("endDate") Date endDate, - @Param("engineType") String engineType); + @Param("startId") Long startId, + @Param("status") List status); List search( @Param("id") Long id, diff --git a/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/errorcode/JobhistoryErrorCodeSummary.java b/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/errorcode/JobhistoryErrorCodeSummary.java index 5c62944d5e..3cd7137261 100644 --- a/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/errorcode/JobhistoryErrorCodeSummary.java +++ b/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/errorcode/JobhistoryErrorCodeSummary.java @@ -22,7 +22,7 @@ public enum JobhistoryErrorCodeSummary implements LinkisErrorCode { UNFINISHED_TASKS( 20020, - "entrance service restart, automatically cancel tasks that have not been completed for a long time (24h)"); + "Your job will be marked as canceled because the Entrance service restarted(因为Entrance服务重启,您的任务将被标记为取消)"); /** (errorCode)错误码 */ private final int errorCode; diff --git a/linkis-public-enhancements/linkis-jobhistory/src/main/resources/mapper/common/JobHistoryMapper.xml b/linkis-public-enhancements/linkis-jobhistory/src/main/resources/mapper/common/JobHistoryMapper.xml index 824d989c60..9c96c85eb1 100644 --- a/linkis-public-enhancements/linkis-jobhistory/src/main/resources/mapper/common/JobHistoryMapper.xml +++ 
b/linkis-public-enhancements/linkis-jobhistory/src/main/resources/mapper/common/JobHistoryMapper.xml @@ -98,13 +98,11 @@ @@ -113,21 +114,57 @@ - update linkis_ps_udf_user_load set user_name=#{newUser} where udf_id=#{udfId} and user_name=#{oldUser} + update linkis_ps_udf_user_load set user_name=#{newUser},update_time =now() where udf_id=#{udfId} and user_name=#{oldUser} - update linkis_ps_udf_version set is_published=#{isPublished} where udf_id=#{udfId} and + update linkis_ps_udf_version set is_published=#{isPublished},update_time =now() where udf_id=#{udfId} and bml_resource_version=#{version} @@ -92,7 +92,7 @@ - update linkis_ps_udf_version set bml_resource_id=#{resourceId},path=replace(`path`,#{oldUser},#{newUser}) + update linkis_ps_udf_version set bml_resource_id=#{resourceId},path=replace(`path`,#{oldUser},#{newUser}),update_time=now() where udf_id=#{udfId} @@ -101,6 +101,7 @@ SET path=#{path,jdbcType=VARCHAR}, register_format=#{registerFormat,jdbcType=VARCHAR}, use_format=#{useFormat,jdbcType=VARCHAR}, + update_time=now(), description=#{description,jdbcType=VARCHAR} WHERE id = #{id,jdbcType=BIGINT} diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/create.sql b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/create.sql index c22d27b529..f8c41badc7 100644 --- a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/create.sql +++ b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/create.sql @@ -20,11 +20,13 @@ SET REFERENTIAL_INTEGRITY FALSE; DROP TABLE IF EXISTS linkis_ps_udf_user_load CASCADE; CREATE TABLE IF NOT EXISTS linkis_ps_udf_user_load ( - id bigint(20) NOT NULL AUTO_INCREMENT, - udf_id bigint(20) NOT NULL, - user_name varchar(50) NOT NULL, - PRIMARY KEY (id) -) ; + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `udf_id` bigint(20) NOT NULL, + `user_name` varchar(50) NOT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + 
`create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; DROP TABLE IF EXISTS linkis_ps_udf_baseinfo CASCADE; CREATE TABLE IF NOT EXISTS linkis_ps_udf_baseinfo ( @@ -67,6 +69,7 @@ CREATE TABLE IF NOT EXISTS linkis_ps_udf_version ( use_format varchar(255) DEFAULT NULL, description varchar(255) NOT NULL COMMENT 'version desc', create_time timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + update_time timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, md5 varchar(100) DEFAULT NULL, PRIMARY KEY (id) ) ; @@ -82,7 +85,9 @@ CREATE TABLE IF NOT EXISTS linkis_ps_udf_shared_info ( DROP TABLE IF EXISTS linkis_ps_udf_manager CASCADE; CREATE TABLE IF NOT EXISTS linkis_ps_udf_manager ( - id bigint(20) NOT NULL AUTO_INCREMENT, - user_name varchar(20) DEFAULT NULL, - PRIMARY KEY (id) -) ; \ No newline at end of file + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `user_name` varchar(20) DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; \ No newline at end of file diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/main/java/org/apache/linkis/gateway/authentication/errorcode/LinkisGwAuthenticationErrorCodeSummary.java b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/main/java/org/apache/linkis/gateway/authentication/errorcode/LinkisGwAuthenticationErrorCodeSummary.java index 5abff15380..5679fe0640 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/main/java/org/apache/linkis/gateway/authentication/errorcode/LinkisGwAuthenticationErrorCodeSummary.java +++ 
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/main/java/org/apache/linkis/gateway/authentication/errorcode/LinkisGwAuthenticationErrorCodeSummary.java @@ -20,12 +20,17 @@ import org.apache.linkis.common.errorcode.LinkisErrorCode; public enum LinkisGwAuthenticationErrorCodeSummary implements LinkisErrorCode { - TOKEN_IS_NULL(15205, "token is null(token 令牌为空)!"), - FAILED_TO_LOAD_TOKEN(15200, "Failed to load token from DB into cache(无法将 token 令牌从数据库加载到缓存中)!"), - TOKEN_VALID_OR_STALE(15201, "Token is not valid or stale(token 令牌无效或已过期)!"), - ILLEGAL_TOKENUSER(15202, "Illegal TokenUser for Token(Token非法用户)!"), - ILLEGAL_HOST(15203, "Illegal Host for Token(Token非法主机)!"), - INVALID_TOKEN(15204, "Invalid Token(令牌无效)"); + FAILED_TO_LOAD_TOKEN( + 15200, + "Failed to load token:{0} from DB into cache(无法将 Token:{0} 令牌从数据库加载到缓存中),Caused by:{1}"), + TOKEN_IS_EXPIRED(15201, "Token is not valid or stale({0} 令牌已过期)!"), + ILLEGAL_TOKENUSER(15202, "Illegal TokenUser for Token(Token非法用户: {0})!"), + ILLEGAL_HOST(15203, "Illegal Host for Token(非法ip: {0})!"), + INVALID_TOKEN(15204, "Invalid Token(数据库中未配置的无效令牌)"), + TOKEN_IS_NULL(15205, "token is null({0} 令牌参数为空)!"), + FAILED_TO_BAD_SQLGRAMMAR( + 15206, "Failed to query token:{0} data(Token:{0} 数据查询失败), Caused by:{1}"), + NOT_EXIST_DB(15207, "Token:{0} does not exist in the table(Token:{0} 表中不存在)!, Caused by:{1}"); /** (errorCode)错误码 */ private final int errorCode; diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/main/scala/org/apache/linkis/gateway/authentication/service/CachedTokenService.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/main/scala/org/apache/linkis/gateway/authentication/service/CachedTokenService.scala index e839d0b5b5..91f10cc7bf 100644 --- 
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/main/scala/org/apache/linkis/gateway/authentication/service/CachedTokenService.scala +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/main/scala/org/apache/linkis/gateway/authentication/service/CachedTokenService.scala @@ -23,19 +23,21 @@ import org.apache.linkis.gateway.authentication.bo.impl.TokenImpl import org.apache.linkis.gateway.authentication.conf.TokenConfiguration import org.apache.linkis.gateway.authentication.dao.TokenDao import org.apache.linkis.gateway.authentication.entity.TokenEntity +import org.apache.linkis.gateway.authentication.errorcode.LinkisGwAuthenticationErrorCodeSummary import org.apache.linkis.gateway.authentication.errorcode.LinkisGwAuthenticationErrorCodeSummary._ import org.apache.linkis.gateway.authentication.exception.{ TokenAuthException, TokenNotExistException } -import org.apache.linkis.gateway.authentication.exception.TokenNotExistException import org.springframework.beans.factory.annotation.Autowired import org.springframework.stereotype.Service +import java.text.MessageFormat import java.util.concurrent.{ExecutionException, TimeUnit} import com.google.common.cache.{CacheBuilder, CacheLoader, LoadingCache} +import com.google.common.util.concurrent.UncheckedExecutionException @Service class CachedTokenService extends TokenService { @@ -59,9 +61,9 @@ class CachedTokenService extends TokenService { }); -// def setTokenDao(tokenDao: TokenDao): Unit = { -// this.tokenDao = tokenDao -// } + // def setTokenDao(tokenDao: TokenDao): Unit = { + // this.tokenDao = tokenDao + // } /* TODO begin @@ -104,28 +106,41 @@ class CachedTokenService extends TokenService { private def loadTokenFromCache(tokenName: String): Token = { if (tokenName == null) { - throw new TokenAuthException(TOKEN_IS_NULL.getErrorCode, TOKEN_IS_NULL.getErrorDesc) + throw new TokenAuthException( + TOKEN_IS_NULL.getErrorCode, + 
MessageFormat.format(TOKEN_IS_NULL.getErrorDesc, tokenName) + ) } Utils.tryCatch(tokenCache.get(tokenName))(t => t match { case x: ExecutionException => x.getCause match { - case _: TokenNotExistException => null - case _ => - throw new TokenAuthException( - FAILED_TO_LOAD_TOKEN.getErrorCode, - FAILED_TO_LOAD_TOKEN.getErrorDesc - ) + case e: TokenNotExistException => + throwTokenAuthException(NOT_EXIST_DB, tokenName, e) + case e => + throwTokenAuthException(FAILED_TO_LOAD_TOKEN, tokenName, e) } - case _ => - throw new TokenAuthException( - FAILED_TO_LOAD_TOKEN.getErrorCode, - FAILED_TO_LOAD_TOKEN.getErrorDesc - ) + case e: UncheckedExecutionException => + throwTokenAuthException(FAILED_TO_BAD_SQLGRAMMAR, tokenName, e) + case e => + throwTokenAuthException(FAILED_TO_LOAD_TOKEN, tokenName, e) } ) } + private def throwTokenAuthException( + gwAuthenticationErrorCodeSummary: LinkisGwAuthenticationErrorCodeSummary, + tokenName: String, + e: Throwable + ) = { + val exception = new TokenAuthException( + gwAuthenticationErrorCodeSummary.getErrorCode, + MessageFormat.format(gwAuthenticationErrorCodeSummary.getErrorDesc, tokenName, e.getMessage) + ) + exception.initCause(e) + throw exception + } + private def isTokenAcceptableWithUser(token: Token, userName: String): Boolean = { token != null && !token.isStale() && token.isUserLegal(userName) } @@ -153,20 +168,27 @@ class CachedTokenService extends TokenService { override def doAuth(tokenName: String, userName: String, host: String): Boolean = { val tmpToken: Token = loadTokenFromCache(tokenName) var ok: Boolean = true + // token expired if (!isTokenValid(tmpToken)) { ok = false throw new TokenAuthException( - TOKEN_VALID_OR_STALE.getErrorCode, - TOKEN_VALID_OR_STALE.getErrorDesc + TOKEN_IS_EXPIRED.getErrorCode, + MessageFormat.format(TOKEN_IS_EXPIRED.getErrorDesc, tokenName) ) } if (!isTokenAcceptableWithUser(tmpToken, userName)) { ok = false - throw new TokenAuthException(ILLEGAL_TOKENUSER.getErrorCode, 
ILLEGAL_TOKENUSER.getErrorDesc) + throw new TokenAuthException( + ILLEGAL_TOKENUSER.getErrorCode, + MessageFormat.format(ILLEGAL_TOKENUSER.getErrorDesc, userName) + ) } if (!isTokenAcceptableWithHost(tmpToken, host)) { ok = false - throw new TokenAuthException(ILLEGAL_HOST.getErrorCode, ILLEGAL_HOST.getErrorDesc) + throw new TokenAuthException( + ILLEGAL_HOST.getErrorCode, + MessageFormat.format(ILLEGAL_HOST.getErrorDesc, host) + ) } ok } diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/java/org/apache/linkis/gateway/authentication/service/CachedTokenServiceTest.java b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/java/org/apache/linkis/gateway/authentication/service/CachedTokenServiceTest.java index 0412197a03..3251684f3c 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/java/org/apache/linkis/gateway/authentication/service/CachedTokenServiceTest.java +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/java/org/apache/linkis/gateway/authentication/service/CachedTokenServiceTest.java @@ -77,5 +77,16 @@ void testDoAuth() { assertThrows( TokenAuthException.class, () -> tokenService.doAuth(TokenName, "test", "10.10.10.10")); logger.info("assertThrows:{}", exception.getMessage()); + + exception = + assertThrows( + TokenAuthException.class, () -> tokenService.doAuth("NOT-EXIST", "test", "127.0.0.1")); + logger.info("assertThrows:{}", exception.getMessage()); + + exception = + assertThrows( + TokenAuthException.class, + () -> tokenService.doAuth("LINKISCLI-AUTH", "test", "127.0.0.1")); + logger.info("assertThrows:{}", exception.getMessage()); } } diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/resources/data.sql 
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/resources/data.sql index 9c86868a63..1eb479b7cb 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/resources/data.sql +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/resources/data.sql @@ -25,4 +25,4 @@ INSERT INTO `linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hos INSERT INTO `linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`) VALUES ('dss-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS'); INSERT INTO `linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`) VALUES ('QUALITIS-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS'); INSERT INTO `linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`) VALUES ('VALIDATOR-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS'); -INSERT INTO `linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`) VALUES ('LINKISCLI-AUTH','*','*','BDP',curdate(),curdate(),-1,'LINKIS'); +INSERT INTO `linkis_mg_gateway_auth_token`(`token_name`,`legal_users`,`legal_hosts`,`business_owner`,`create_time`,`update_time`,`elapse_day`,`update_by`) VALUES ('LINKISCLI-AUTH','*','*','BDP',curdate(),curdate(),-3,'LINKIS'); diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/parser/GatewayParser.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/parser/GatewayParser.scala index dd336809ab..bf56e10498 100644 --- 
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/parser/GatewayParser.scala +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/parser/GatewayParser.scala @@ -108,6 +108,12 @@ class DefaultGatewayParser(gatewayParsers: Array[GatewayParser]) extends Abstrac gatewayContext.getGatewayRoute.setRequestURI(path) } gatewayParsers.foreach(_.parse(gatewayContext)) + + /** + * Gateway forwarding logic: PublicService Service exists and is effective And then judge + * metadataquery Service, Continue to judge linkismanager Service, Final judgment + * linkispsdatasource Service + */ if (gatewayContext.getGatewayRoute.getServiceInstance == null) path match { case CLIENT_HEARTBEAT_REGEX(version) => if (sendResponseWhenNotMatchVersion(gatewayContext, version)) return @@ -126,6 +132,9 @@ class DefaultGatewayParser(gatewayParsers: Array[GatewayParser]) extends Abstrac RPCConfiguration.METADATAQUERY_SERVICE_APPLICATION_NAME.getValue } else if (RPCConfiguration.LINKIS_MANAGER_SERVICE_LIST.contains(serviceId)) { RPCConfiguration.LINKIS_MANAGER_SERVICE_NAME.getValue + // After the complete merge is completed, it needs to be removed + } else if (RPCConfiguration.LINKIS_DATASOURCE_SERVICE_LIST.contains(serviceId)) { + RPCConfiguration.LINKIS_DATASOURCE_SERVICE_NAME.getValue } else { serviceId } diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/GatewaySSOUtils.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/GatewaySSOUtils.scala index 675944013e..2d696d8410 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/GatewaySSOUtils.scala +++ 
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/GatewaySSOUtils.scala @@ -113,7 +113,17 @@ object GatewaySSOUtils extends Logging { def removeLoginUser(gatewayContext: GatewayContext): Unit = { SSOUtils.removeLoginUser(gatewayContext.getRequest.getCookies.asScala.flatMap(_._2).toArray) - SSOUtils.removeLoginUserByAddCookie(c => gatewayContext.getResponse.addCookie(c)) + SSOUtils.removeLoginUserByAddCookie(c => { + if (cookieDomainSetupSwitch) { + val host = gatewayContext.getRequest.getHeaders.get("Host") + if (host != null && host.nonEmpty) { + c.setDomain( + getCookieDomain(host.head, GatewayConfiguration.GATEWAY_DOMAIN_LEVEL.getValue) + ) + } + } + gatewayContext.getResponse.addCookie(c) + }) } def updateLastAccessTime(gatewayContext: GatewayContext): Unit = diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/UserRestful.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/UserRestful.scala index 6e3aca6c91..b0f42f9ad0 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/UserRestful.scala +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/UserRestful.scala @@ -238,9 +238,13 @@ abstract class UserPwdAbstractUserRestful extends AbstractUserRestful with Loggi def clearExpireCookie(gatewayContext: GatewayContext): Unit = { val cookies = gatewayContext.getRequest.getCookies.values().asScala.flatMap(cookie => cookie).toArray - val expireCookies = cookies.filter(cookie => - cookie.getName.equals(ServerConfiguration.LINKIS_SERVER_SESSION_TICKETID_KEY.getValue) - ) + val expireCookies = cookies.filter { cookie => + cookie.getName.equals( + 
ServerConfiguration.LINKIS_SERVER_SESSION_TICKETID_KEY.getValue + ) || cookie.getName.equals( + ServerConfiguration.LINKIS_SERVER_SESSION_PROXY_TICKETID_KEY.getValue + ) + } val host = gatewayContext.getRequest.getHeaders.get("Host") if (host != null && host.nonEmpty) { val maxDomainLevel = host.head.split("\\.").length diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/token/TokenAuthentication.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/token/TokenAuthentication.scala index 9a460eb52c..c5c801029a 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/token/TokenAuthentication.scala +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/token/TokenAuthentication.scala @@ -93,7 +93,7 @@ object TokenAuthentication extends Logging { }) if (ok) { logger.info( - s"Token authentication succeed, uri: ${gatewayContext.getRequest.getRequestURI}, token: $token, tokenUser: $tokenUser." + s"Token authentication succeed, uri: ${gatewayContext.getRequest.getRequestURI}, token: $token, tokenUser: $tokenUser, host: $host." ) if (login) { logger.info( @@ -115,6 +115,9 @@ object TokenAuthentication extends Logging { } true } else { + logger.info( + s"Token authentication fail, uri: ${gatewayContext.getRequest.getRequestURI}, token: $token, tokenUser: $tokenUser, host: $host." 
+ ) SecurityFilter.filterResponse(gatewayContext, authMsg) false } diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/response/DWSResult.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/response/DWSResult.scala index bada2315cb..3f00f61f5f 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/response/DWSResult.scala +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/org/apache/linkis/httpclient/dws/response/DWSResult.scala @@ -82,7 +82,9 @@ trait DWSResult extends Logging with HttpResult { this.url = url this.contentType = contentType } { case e: Exception => - logger.error(e.getMessage()) + throw new HttpClientResultException( + s"URL $url request failed! ResponseBody is $responseBody. 
${e.getMessage}" + ) } } diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/pom.xml b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/pom.xml index ef4635ae00..8d8c131abb 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/pom.xml +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/pom.xml @@ -94,19 +94,6 @@ jackson-databind - - - com.webank.wedatasphere.dss - dss-gateway-support - 1.1.1 - - - org.apache.linkis - linkis-gateway-server-support - - - - @@ -115,7 +102,6 @@ net.alchim31.maven scala-maven-plugin - org.apache.maven.plugins maven-assembly-plugin @@ -126,7 +112,7 @@ false false - /src/main/assembly/distribution.xml + src/main/assembly/distribution.xml diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/assembly/distribution.xml b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/assembly/distribution.xml index 7180fea9ce..cf2fd12322 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/assembly/distribution.xml +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/assembly/distribution.xml @@ -21,7 +21,6 @@ linkis-gateway dir - zip false linkis-gateway @@ -44,9 +43,5 @@ - - - - diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/dss/parser/DSSGatewayConfiguration.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/dss/parser/DSSGatewayConfiguration.scala new file mode 100644 index 0000000000..937b0ddeb7 --- /dev/null +++ 
b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/dss/parser/DSSGatewayConfiguration.scala @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.gateway.dss.parser + +import org.apache.linkis.common.conf.CommonVars + +object DSSGatewayConfiguration { + val DSS_SPRING_NAME: CommonVars[String] = CommonVars("wds.linkis.dss.name", "dss-server") + + val DSS_URL_LABEL_PREFIX: CommonVars[String] = + CommonVars("wds.dss.gateway.url.prefix.name", "labels") + + val DSS_URL_ROUTE_LABEL_PREFIX: CommonVars[String] = + CommonVars("wds.dss.gateway.url.prefix.name", "labelsRoute") + + val DSS_URL_APPCONNS: CommonVars[String] = CommonVars("wds.dss.gateway.url.appconns", "visualis") + + val DSS_APPS_SERVER_OTHER_PREFIX: CommonVars[String] = + CommonVars("wds.dss.gateway.apps.server.other.prefix", "scriptis,apiservice,datapipe,guide") + + val DSS_APPS_SERVER_DISTINCT_NAME: CommonVars[String] = + CommonVars("wds.dss.gateway.apps.server.distinct.name", "apps") + + val DSS_APPS_SERVER_ISMERGE: CommonVars[Boolean] = + CommonVars("wds.dss.gateway.apps.server.ismerge", true) + +} diff --git 
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/dss/parser/DSSGatewayParser.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/dss/parser/DSSGatewayParser.scala new file mode 100644 index 0000000000..e1b4bf430e --- /dev/null +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/dss/parser/DSSGatewayParser.scala @@ -0,0 +1,291 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.gateway.dss.parser + +import org.apache.linkis.common.ServiceInstance +import org.apache.linkis.gateway.exception.TooManyServiceException +import org.apache.linkis.gateway.http.GatewayContext +import org.apache.linkis.gateway.parser.AbstractGatewayParser +import org.apache.linkis.gateway.springcloud.SpringCloudGatewayConfiguration.{ + normalPath, + API_URL_PREFIX +} +import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactoryContext +import org.apache.linkis.manager.label.entity.Label +import org.apache.linkis.manager.label.entity.route.RouteLabel +import org.apache.linkis.protocol.constants.TaskConstant +import org.apache.linkis.protocol.utils.ZuulEntranceUtils +import org.apache.linkis.rpc.sender.SpringCloudFeignConfigurationCache +import org.apache.linkis.server.BDPJettyServerHelper + +import org.springframework.stereotype.Component + +import java.util +import java.util.Locale + +import scala.collection.JavaConverters._ + +@Component +class DSSGatewayParser extends AbstractGatewayParser { + + val appConns = DSSGatewayConfiguration.DSS_URL_APPCONNS.getValue.split(",") + + override def shouldContainRequestBody(gatewayContext: GatewayContext): Boolean = { + var contentType = gatewayContext.getRequest.getHeaders.get("Content-Type") + if (null == contentType) { + contentType = gatewayContext.getRequest.getHeaders.get("content-type") + } + + if ( + contentType != null && contentType.nonEmpty + && contentType(0).contains("form-data") + ) { + logger.info("DSS gateway get request type is form-data") + return false + } + + gatewayContext.getRequest.getRequestURI match { + case DSSGatewayParser.DSS_URL_DEFAULT_REGEX(_, _) => true + case DSSGatewayParser.DSS_URL_REGEX(_, _, _) => true + case DSSGatewayParser.APPCONN_URL_DEFAULT_REGEX(_, appconn, _) + if appConns.contains(appconn) => + true + case _ => false + } + } + + override def parse(gatewayContext: GatewayContext): Unit = + gatewayContext.getRequest.getRequestURI match { 
+ + case DSSGatewayParser.DSS_URL_FLOW_QUERY_PREFIX(version, execId, _) => + // must put it before DSS_URL_REGEX(_, _, _), because this match was included in DSS_URL_REGEX(_, _, _) + if (sendResponseWhenNotMatchVersion(gatewayContext, version)) return + val serviceInstances = ZuulEntranceUtils.parseServiceInstanceByExecID(execId) + gatewayContext.getGatewayRoute.setServiceInstance(serviceInstances(0)) + case DSSGatewayParser.DSS_URL_REGEX(version, firstName, secondName) => + if (sendResponseWhenNotMatchVersion(gatewayContext, version)) return + var tmpServerName = "dss-" + firstName + "-" + secondName + "-server" + tmpServerName = getServiceNameFromLabel(gatewayContext, tmpServerName) + // apiservice,datapipe,scriptis和guide服务合并到dss-apps-server,其中的接口需要转发到apps服务 + var tmpFirstName = firstName + if ( + DSSGatewayConfiguration.DSS_APPS_SERVER_ISMERGE.getValue && + DSSGatewayConfiguration.DSS_APPS_SERVER_OTHER_PREFIX.getValue + .split(",") + .contains(firstName) + ) { + tmpFirstName = + DSSGatewayConfiguration.DSS_APPS_SERVER_DISTINCT_NAME.getValue + "/" + firstName + } + val serviceName: Option[String] = + findCommonService("dss/" + tmpFirstName + "/" + secondName, tmpServerName) + if (serviceName.isDefined) { + gatewayContext.getGatewayRoute.setServiceInstance(ServiceInstance(serviceName.get, null)) + } else { + logger.info( + "Now set default serviceInstance name " + DSSGatewayConfiguration.DSS_SPRING_NAME.getValue + "," + gatewayContext.getRequest.getRequestURI + ) + gatewayContext.getGatewayRoute.setServiceInstance( + ServiceInstance(DSSGatewayConfiguration.DSS_SPRING_NAME.getValue, null) + ) + } + case DSSGatewayParser.DSS_URL_DEFAULT_REGEX(version, firstName) => + if (sendResponseWhenNotMatchVersion(gatewayContext, version)) return + var tmpServerName = "dss-" + firstName + "-server" + tmpServerName = getServiceNameFromLabel(gatewayContext, tmpServerName) + // apiservice,datapipe,scriptis和guide服务合并到dss-apps-server,其中的接口需要转发到apps服务 + var tmpFirstName = firstName + 
if ( + DSSGatewayConfiguration.DSS_APPS_SERVER_ISMERGE.getValue && + DSSGatewayConfiguration.DSS_APPS_SERVER_OTHER_PREFIX.getValue + .split(",") + .contains(firstName) + ) { + tmpFirstName = + DSSGatewayConfiguration.DSS_APPS_SERVER_DISTINCT_NAME.getValue + "/" + firstName + } + val serviceName: Option[String] = findCommonService("dss/" + tmpFirstName, tmpServerName) + if (serviceName.isDefined) { + gatewayContext.getGatewayRoute.setServiceInstance(ServiceInstance(serviceName.get, null)) + } else { + logger.info( + "Now set default serviceInstance name " + DSSGatewayConfiguration.DSS_SPRING_NAME.getValue + "," + gatewayContext.getRequest.getRequestURI + ) + gatewayContext.getGatewayRoute.setServiceInstance( + ServiceInstance(DSSGatewayConfiguration.DSS_SPRING_NAME.getValue, null) + ) + } + case DSSGatewayParser.APPCONN_URL_DEFAULT_REGEX(version, serverName, _) + if appConns.contains(serverName) => + if (sendResponseWhenNotMatchVersion(gatewayContext, version)) return + var tmpServerName = serverName + tmpServerName = getServiceNameFromLabel(gatewayContext, tmpServerName) + val serviceName: Option[String] = findCommonService(tmpServerName, tmpServerName) + if (serviceName.isDefined) { + gatewayContext.getGatewayRoute.setServiceInstance(ServiceInstance(serviceName.get, null)) + } else { + logger.info( + "Now set default serviceInstance name " + DSSGatewayConfiguration.DSS_SPRING_NAME.getValue + "," + gatewayContext.getRequest.getRequestURI + ) + gatewayContext.getGatewayRoute.setServiceInstance( + ServiceInstance(DSSGatewayConfiguration.DSS_SPRING_NAME.getValue, null) + ) + } + case _ => + } + + private def getServiceNameFromLabel( + gatewayContext: GatewayContext, + tmpServiceName: String + ): String = { + var requestUrlLabels = gatewayContext.getRequest.getQueryParams + .getOrDefault(DSSGatewayConfiguration.DSS_URL_LABEL_PREFIX.getValue, null) + if (requestUrlLabels == null) { + requestUrlLabels = gatewayContext.getRequest.getQueryParams + 
.getOrDefault(DSSGatewayConfiguration.DSS_URL_ROUTE_LABEL_PREFIX.getValue, null) + } + logger.info( + "Get ServiceName From Label and method is " + gatewayContext.getRequest.getMethod.toString + ",and urlLabels is " + requestUrlLabels + ) + val requestMethod = gatewayContext.getRequest.getMethod.toLowerCase(Locale.getDefault()) + if ( + requestUrlLabels == null && (requestMethod + .equals("post") || requestMethod.equals("put") || requestMethod.equals("delete")) + ) { + val requestBody = Option(gatewayContext.getRequest.getRequestBody) + val routeLabelList = new util.ArrayList[RouteLabel]() + + requestBody match { + // todo form-data resolve + case Some(body) => + val labelBuilderFactory = LabelBuilderFactoryContext.getLabelBuilderFactory + val json = + BDPJettyServerHelper.gson.fromJson(body, classOf[java.util.Map[String, Object]]) + val labels: util.List[Label[_]] = json.get(TaskConstant.LABELS) match { + case map: util.Map[String, Object] => labelBuilderFactory.getLabels(map) + case map: util.Map[String, Any] => labelBuilderFactory.getLabels(map.asInstanceOf) + case _ => new util.ArrayList[Label[_]]() + } + labels.asScala + .filter(label => label.isInstanceOf[RouteLabel]) + .foreach(label => { + routeLabelList.add(label.asInstanceOf[RouteLabel]) + }) + + case _ => null + } + val labelNameList = routeLabelList.asScala.map(routeLabel => routeLabel.getStringValue).toList + if (labelNameList != null && labelNameList.size > 0) { + genServiceNameByDSSLabel(labelNameList, tmpServiceName) + } else if (null != requestUrlLabels) { + genServiceNameByDSSLabel(requestUrlLabels.toList, tmpServiceName) + } else tmpServiceName + + } else { + if (requestUrlLabels != null) { + genServiceNameByDSSLabel(requestUrlLabels.toList, tmpServiceName) + } else tmpServiceName + } + } + + private def genServiceNameByDSSLabel(labelList: List[String], tmpServiceName: String): String = { + var resultName = tmpServiceName + if (null != labelList && labelList.size > 0) { + val labelNameList = 
labelList(0).replace(" ", "").split(",").toList + if (labelNameList.size > 0) { + if (labelNameList.find(name => name.equalsIgnoreCase("dev")).isDefined) { + resultName = tmpServiceName + "-dev" + } else if (labelNameList.find(name => name.equalsIgnoreCase("prod")).isDefined) { + resultName = tmpServiceName + "-prod" + } else if (labelNameList.find(name => name.equalsIgnoreCase("test")).isDefined) { + resultName = tmpServiceName + "-test" + } else { + resultName = tmpServiceName + } + } + } + resultName + } + + private def findCommonService(parsedServiceId: String, tmpServerName: String) = findService( + parsedServiceId, + tmpServerName, + services => { + val errorMsg = new TooManyServiceException( + s"Cannot find a correct serviceId for parsedServiceId $parsedServiceId, service list is: " + services + ) + warn("", errorMsg) + throw errorMsg + } + ) + + protected def findService( + parsedServiceId: String, + tmpServerName: String, + tooManyDeal: List[String] => Option[String] + ): Option[String] = { + val findIt: (String => Boolean) => Option[String] = op => { + val services = + SpringCloudFeignConfigurationCache.getDiscoveryClient.getServices.asScala.filter(op).toList + if (services.length == 1) Some(services.head) + else if (services.length > 1) tooManyDeal(services) + else None + } + // 通过匹配到最多的url中的path进行路由,如/dss/framework/workspace/ 会匹配到 dss-framework-workspace-server 而不是 dss-server + // 如果产生了相等的情况,则按照短的service名字为准 比如/dss/getProject, + // 我们可能会匹配到dss-server以及 dss-framework-workspace-server,则选择短名称的dss-server + val findMostCorrect: (String => (String, Int)) => Option[String] = { op => + { + val serviceMap = + SpringCloudFeignConfigurationCache.getDiscoveryClient.getServices.asScala.map(op).toMap + var count = 0 + var retService: Option[String] = None + serviceMap.foreach { case (k, v) => + if (v > count) { + count = v + retService = Some(k) + } else if (retService.isDefined && v == count && k.length < retService.get.length) { + retService = Some(k) + } + } + 
retService + } + } + var lowerServiceId = parsedServiceId.toLowerCase(Locale.getDefault()) + val serverName = tmpServerName.toLowerCase(Locale.getDefault()) + // 让prod的接口匹配到prod的服务 + if (serverName.endsWith("-prod")) lowerServiceId += "/prod" + findIt(_.toLowerCase(Locale.getDefault()) == serverName).orElse(findMostCorrect(service => { + (service, lowerServiceId.split("/").count(word => service.contains(word))) + })) + } + +} + +object DSSGatewayParser { + val DSS_HEADER = normalPath(API_URL_PREFIX) + "rest_[a-zA-Z][a-zA-Z_0-9]*/(v\\d+)/dss/" + val DSS_URL_REGEX = (DSS_HEADER + "([^/]+)/" + "([^/]+)/.+").r + val DSS_URL_DEFAULT_REGEX = (DSS_HEADER + "([^/]+).+").r + + val APPCONN_HEADER = normalPath(API_URL_PREFIX) + "rest_[a-zA-Z][a-zA-Z_0-9]*/(v\\d+)/([^/]+)/" + val APPCONN_URL_DEFAULT_REGEX = (APPCONN_HEADER + "([^/]+).+").r + + val DSS_URL_FLOW_QUERY_PREFIX = + (DSS_HEADER + "flow/entrance/" + "([^/]+)/" + "(status|execution|kill)").r + +} diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/dss/parser/DSSRouteLabelParser.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/dss/parser/DSSRouteLabelParser.scala new file mode 100644 index 0000000000..ddf87c3258 --- /dev/null +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/src/main/scala/org/apache/linkis/gateway/dss/parser/DSSRouteLabelParser.scala @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.gateway.dss.parser + +import org.apache.linkis.gateway.http.GatewayContext +import org.apache.linkis.gateway.ujes.route.label.RouteLabelParser +import org.apache.linkis.manager.label.entity.route.RouteLabel + +import org.springframework.stereotype.Component + +import java.util + +@Component +class DSSRouteLabelParser extends RouteLabelParser { + + override def parse(gatewayContext: GatewayContext): util.List[RouteLabel] = { + val routeLabelList = new util.ArrayList[RouteLabel]() + var requestLabels = gatewayContext.getRequest.getQueryParams + .getOrDefault(DSSGatewayConfiguration.DSS_URL_LABEL_PREFIX.getValue, null) + if (requestLabels == null) { + requestLabels = gatewayContext.getRequest.getQueryParams + .getOrDefault(DSSGatewayConfiguration.DSS_URL_ROUTE_LABEL_PREFIX.getValue, null) + } + if (null != requestLabels && requestLabels.size > 0) { + val labelNameList = requestLabels(0).replace(" ", "").split(",").toList + if (labelNameList.size > 0) labelNameList.foreach(labelName => { + val routeLabel = new RouteLabel + routeLabel.setRoutePath(labelName) +// routeLabelList.add(routeLabel) + }) + } + if (routeLabelList.isEmpty) { + val requestBody = Option(gatewayContext.getRequest.getRequestBody) + requestBody match { + // todo form-data resolve + case Some(body) => + if (body.contains("form-data")) {} else { +// val labelBuilderFactory = LabelBuilderFactoryContext.getLabelBuilderFactory +// val json = BDPJettyServerHelper.gson.fromJson(body, classOf[java.util.Map[String, Object]]) +// val labels: util.List[Label[_]] = 
json.get(TaskConstant.LABELS) match { +// case map: util.Map[String, Object] => labelBuilderFactory.getLabels(map) +// case map: util.Map[String, Any] => labelBuilderFactory.getLabels(map.asInstanceOf) +// case _ => new util.ArrayList[Label[_]]() +// } +// labels.filter(label => label.isInstanceOf[RouteLabel]).foreach(label => { +// routeLabelList.add(label.asInstanceOf[RouteLabel]) +// }) + } + case _ => null + } + } + + routeLabelList + } + +} diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/src/main/java/org/apache/linkis/gateway/springcloud/handler/CustomErrorAttributes.java b/linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/src/main/java/org/apache/linkis/gateway/springcloud/handler/CustomErrorAttributes.java new file mode 100644 index 0000000000..ff27b76b47 --- /dev/null +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/src/main/java/org/apache/linkis/gateway/springcloud/handler/CustomErrorAttributes.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.gateway.springcloud.handler; + +import org.springframework.boot.web.error.ErrorAttributeOptions; +import org.springframework.boot.web.reactive.error.DefaultErrorAttributes; +import org.springframework.core.annotation.MergedAnnotation; +import org.springframework.core.annotation.MergedAnnotations; +import org.springframework.http.HttpStatus; +import org.springframework.stereotype.Component; +import org.springframework.web.bind.annotation.ResponseStatus; +import org.springframework.web.reactive.function.server.ServerRequest; +import org.springframework.web.server.ResponseStatusException; + +import java.util.HashMap; +import java.util.Map; + +import com.google.common.collect.Lists; + +@Component +public class CustomErrorAttributes extends DefaultErrorAttributes { + + @Override + public Map getErrorAttributes( + ServerRequest request, ErrorAttributeOptions options) { + Throwable throwable = this.getError(request); + MergedAnnotation responseStatusAnnotation = + MergedAnnotations.from( + throwable.getClass(), MergedAnnotations.SearchStrategy.TYPE_HIERARCHY) + .get(ResponseStatus.class); + HttpStatus errorStatus = determineHttpStatus(throwable, responseStatusAnnotation); + Map map = new HashMap<>(); + map.put("method", request.path()); + map.put("status", errorStatus.value()); + String msg = errorStatus.getReasonPhrase(); + if (errorStatus.value() >= HttpStatus.INTERNAL_SERVER_ERROR.value()) { + msg = msg + ", with request path:" + request.path(); + } + map.put("message", msg); + map.put("data", Lists.newArrayList()); + + return map; + } + + private HttpStatus determineHttpStatus( + Throwable error, MergedAnnotation responseStatusAnnotation) { + if (error instanceof ResponseStatusException) { + return ((ResponseStatusException) error).getStatus(); + } + return responseStatusAnnotation + .getValue("code", HttpStatus.class) + .orElse(HttpStatus.INTERNAL_SERVER_ERROR); + } +} diff --git 
a/linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/src/main/java/org/apache/linkis/gateway/springcloud/http/GatewayAuthorizationFilter.java b/linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/src/main/java/org/apache/linkis/gateway/springcloud/http/GatewayAuthorizationFilter.java index 9bc1fa6017..71e53fb2b6 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/src/main/java/org/apache/linkis/gateway/springcloud/http/GatewayAuthorizationFilter.java +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/src/main/java/org/apache/linkis/gateway/springcloud/http/GatewayAuthorizationFilter.java @@ -166,7 +166,7 @@ private Mono gatewayDeal( if (serviceInstance != null) { logger.info( "Client request ip: " - + gatewayContext.getRequest().getRemoteAddress() + + gatewayContext.getRequest().getRequestRealIpAddr() + " and uri: " + gatewayContext.getRequest().getRequestURI() + "GatewayRouter route requestUri: " diff --git a/linkis-web/package.json b/linkis-web/package.json index 0b1b838556..809a551561 100644 --- a/linkis-web/package.json +++ b/linkis-web/package.json @@ -18,13 +18,12 @@ }, "lint-staged": { "src/**/*.{js,vue}": [ - "vue-cli-service lint --no-fix", - "git add" + "vue-cli-service lint --no-fix" ] }, "dependencies": { "@form-create/iview": "2.5.27", - "axios": "^0.21.4", + "axios": "0.21.4", "babel-polyfill": "6.26.0", "core-js": "3.27.2", "dexie": "3.2.3", @@ -34,17 +33,18 @@ "highlight.js": "10.7.0", "iview": "3.5.4", "jsencrypt": "3.2.1", - "lodash": "^4.17.21", + "lodash": "4.17.21", "md5": "2.3.0", "mitt": "1.2.0", - "moment": "^2.29.4", + "moment": "2.29.4", "monaco-editor": "0.30.1", "object-to-formdata": "4.2.2", - "path-browserify": "^1.0.1", - "qs": "^6.11.0", + "path-browserify": "1.0.1", + "postcss": "8.4.21", + "qs": "6.11.0", "reconnecting-websocket": "4.4.0", "sql-formatter": "2.3.3", - "svgo": "^3.0.2", + "svgo": 
"3.0.2", "v-jsoneditor": "1.4.5", "vue": "2.6.12", "vue-i18n": "8.22.1", @@ -55,28 +55,29 @@ }, "devDependencies": { "@intlify/vue-i18n-loader": "1.0.0", - "@vue/cli-plugin-babel": "^5.0.8", - "@vue/cli-plugin-eslint": "^5.0.8", - "@vue/cli-service": "^5.0.8", + "@vue/cli-plugin-babel": "5.0.8", + "@vue/cli-plugin-eslint": "5.0.8", + "@vue/cli-service": "5.0.8", "@vue/eslint-config-standard": "4.0.0", "archiver": "3.1.1", + "autoprefixer": "10.4.14", "babel-eslint": "10.1.0", - "copy-webpack-plugin": "^9.1.0", + "copy-webpack-plugin": "9.1.0", "csp-html-webpack-plugin": "5.1.0", - "filemanager-webpack-plugin": "^7.0.0", + "filemanager-webpack-plugin": "7.0.0", "husky": "1.3.1", - "lint-staged": "^13.1.1", + "lint-staged": "13.1.1", "material-design-icons": "3.0.1", "monaco-editor-webpack-plugin": "6.0.0", - "node-sass": "^8.0.0", - "npm-force-resolutions": "^0.0.10", - "sass-loader": "^10.4.1", + "node-sass": "8.0.0", + "npm-force-resolutions": "0.0.10", + "sass-loader": "10.4.1", "svg-sprite-loader": "6.0.0", "vue-cli-plugin-mockjs": "0.1.3", "vue-template-compiler": "2.6.12", "webpack-virtual-modules": "0.3.2" }, "resolutions": { - "postcss": "7.0.36" + "postcss": "8.4.21" } } diff --git a/linkis-web/release-docs/licenses/LICENSE-hint.css.txt b/linkis-web/release-docs/licenses/LICENSE-hint.css.txt new file mode 100644 index 0000000000..9961a4fb36 --- /dev/null +++ b/linkis-web/release-docs/licenses/LICENSE-hint.css.txt @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 Kushagra Gour + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this 
permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/linkis-web/src/apps/URM/module/functionManagement/index.vue b/linkis-web/src/apps/URM/module/functionManagement/index.vue index c756513faf..389b1003c3 100644 --- a/linkis-web/src/apps/URM/module/functionManagement/index.vue +++ b/linkis-web/src/apps/URM/module/functionManagement/index.vue @@ -573,7 +573,7 @@ export default { if (!args.row) return this.$Modal.confirm({ title: this.$t('message.linkis.modal.modalTitle'), - content: this.$t('message.linkis.modal.modalDelete', {envName: args.row.udfName}), + content: this.$t('message.linkis.modal.modalDelete', {name: args.row.udfName}), onOk: ()=>{ api .fetch(`/udf/delete/${args.row.id}`, {}, 'post') diff --git a/linkis-web/src/apps/URM/module/udfManagement/index.vue b/linkis-web/src/apps/URM/module/udfManagement/index.vue index da73981ae0..870b6ddf2c 100644 --- a/linkis-web/src/apps/URM/module/udfManagement/index.vue +++ b/linkis-web/src/apps/URM/module/udfManagement/index.vue @@ -567,7 +567,7 @@ export default { if (!args.row) return this.$Modal.confirm({ title: this.$t('message.linkis.modal.modalTitle'), - content: this.$t('message.linkis.modal.modalDelete', {envName: args.row.udfName}), + content: this.$t('message.linkis.modal.modalDelete', {name: args.row.udfName}), onOk: ()=>{ api.fetch(`/udf/delete/${args.row.id}`, {}, 'post') .then(() => { diff --git a/linkis-web/src/apps/linkis/assets/styles/console.scss 
b/linkis-web/src/apps/linkis/assets/styles/console.scss index 4f89988448..008a4b7686 100644 --- a/linkis-web/src/apps/linkis/assets/styles/console.scss +++ b/linkis-web/src/apps/linkis/assets/styles/console.scss @@ -15,8 +15,31 @@ * limitations under the License. */ -@import '@/common/style/variables.scss'; + @charset "UTF-8"; + @import '@/common/style/variables.scss'; + @import './hint.min.css'; +// * { + +// font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "PingFang SC", "Hiragino Sans GB", "Microsoft YaHei", "Helvetica Neue", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "JinbiaoSong", "JinbiaoSongExt"; +// } +body { + font-family: "Helvetica Neue",Helvetica,"PingFang SC","Hiragino Sans GB","Microsoft YaHei","微软雅黑",Arial,sans-serif, "JinbiaoSong", "JinbiaoSongExt"; +} .console-page{ + .ivu-input { + font-family: Arial, -apple-system, BlinkMacSystemFont, "Segoe UI", "PingFang SC", "Hiragino Sans GB", "Microsoft YaHei", "Helvetica Neue", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "JinbiaoSong", "JinbiaoSongExt"; + } + font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "PingFang SC", "Hiragino Sans GB", "Microsoft YaHei", "Helvetica Neue", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "JinbiaoSong", "JinbiaoSongExt" !important; + .monaco-editor { + --monaco-monospace-font: "SF Mono", Monaco, Menlo, Consolas, "Ubuntu Mono", "Liberation Mono", "DejaVu Sans Mono", "Courier New", monospace, "JinbiaoSong", "JinbiaoSongExt"; + } + .monaco-mouse-cursor-text { + font-family: Consolas, "Courier New", monospace, "JinbiaoSong", "JinbiaoSongExt" !important; + } + .ivu-tooltip-inner { + overflow-wrap: break-word; + white-space: normal; + } position: $relative; width: $percent-all; height: $percent-all; @@ -45,6 +68,9 @@ } } .content-body-side-right { + .ivu-page-item-jump-next:after, .ivu-page-item-jump-prev:after { + content: 
"\2022\2022\2022"; + } flex: 1; background: $body-background; border-radius: $border-radius-small; diff --git a/linkis-web/src/apps/linkis/assets/styles/hint.min.css b/linkis-web/src/apps/linkis/assets/styles/hint.min.css new file mode 100644 index 0000000000..530a95ef74 --- /dev/null +++ b/linkis-web/src/apps/linkis/assets/styles/hint.min.css @@ -0,0 +1,21 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +/*! 
Hint.css - v2.7.0 - 2021-10-01 +* https://kushagra.dev/lab/hint/ +* Copyright (c) 2021 Kushagra Gour */ + +[class*=hint--]{position:relative ; width: 80;}[class*=hint--]:after,[class*=hint--]:before{position:absolute;-webkit-transform:translate3d(0,0,0);-moz-transform:translate3d(0,0,0);transform:translate3d(0,0,0);visibility:hidden;opacity:0;z-index:1000000;pointer-events:none;-webkit-transition:.3s ease;-moz-transition:.3s ease;transition:.3s ease;-webkit-transition-delay:0s;-moz-transition-delay:0s;transition-delay:0s}[class*=hint--]:hover:after,[class*=hint--]:hover:before{visibility:visible;opacity:1;-webkit-transition-delay:.1s;-moz-transition-delay:.1s;transition-delay:.1s}[class*=hint--]:before{content:'';position:absolute;background:0 0;border:6px solid transparent;z-index:1000001}[class*=hint--]:after{background:#383838;color:#fff;padding:8px 10px;font-size:12px;font-family:-apple-system, BlinkMacSystemFont, "Segoe UI", "PingFang SC", "Hiragino Sans GB", "Microsoft YaHei", "Helvetica Neue", Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol", "JinbiaoSong", "JinbiaoSongExt";line-height:12px;white-space:pre-line;word-break: normal;text-shadow:0 -1px 0 #000;box-shadow:4px 4px 8px rgba(0,0,0,.3)}[class*=hint--][aria-label]:after{content:attr(aria-label)}[class*=hint--][data-hint]:after{content:attr(data-hint)}[aria-label='']:after,[aria-label='']:before,[data-hint='']:after,[data-hint='']:before{display:none!important}.hint--top-left:before,.hint--top-right:before,.hint--top:before{border-top-color:#383838}.hint--bottom-left:before,.hint--bottom-right:before,.hint--bottom:before{border-bottom-color:#383838}.hint--top:after,.hint--top:before{bottom:100%;left:50%}.hint--top:before{margin-bottom:-11px;left:calc(50% - 
6px)}.hint--top:after{-webkit-transform:translateX(-50%);-moz-transform:translateX(-50%);transform:translateX(-50%)}.hint--top:hover:before{-webkit-transform:translateY(-8px);-moz-transform:translateY(-8px);transform:translateY(-8px)}.hint--top:hover:after{-webkit-transform:translateX(-50%) translateY(-8px);-moz-transform:translateX(-50%) translateY(-8px);transform:translateX(-50%) translateY(-8px)}.hint--bottom:after,.hint--bottom:before{top:100%;left:50%}.hint--bottom:before{margin-top:-11px;left:calc(50% - 6px)}.hint--bottom:after{-webkit-transform:translateX(-50%);-moz-transform:translateX(-50%);transform:translateX(-50%)}.hint--bottom:hover:before{-webkit-transform:translateY(8px);-moz-transform:translateY(8px);transform:translateY(8px)}.hint--bottom:hover:after{-webkit-transform:translateX(-50%) translateY(8px);-moz-transform:translateX(-50%) translateY(8px);transform:translateX(-50%) translateY(8px)}.hint--right:before{border-right-color:#383838;margin-left:-11px;margin-bottom:-6px}.hint--right:after{margin-bottom:-14px}.hint--right:after,.hint--right:before{left:100%;bottom:50%}.hint--right:hover:after,.hint--right:hover:before{-webkit-transform:translateX(8px);-moz-transform:translateX(8px);transform:translateX(8px)}.hint--left:before{border-left-color:#383838;margin-right:-11px;margin-bottom:-6px}.hint--left:after{margin-bottom:-14px}.hint--left:after,.hint--left:before{right:100%;bottom:50%}.hint--left:hover:after,.hint--left:hover:before{-webkit-transform:translateX(-8px);-moz-transform:translateX(-8px);transform:translateX(-8px)}.hint--top-left:after,.hint--top-left:before{bottom:100%;left:50%}.hint--top-left:before{margin-bottom:-11px;left:calc(50% - 
6px)}.hint--top-left:after{-webkit-transform:translateX(-100%);-moz-transform:translateX(-100%);transform:translateX(-100%);margin-left:12px}.hint--top-left:hover:before{-webkit-transform:translateY(-8px);-moz-transform:translateY(-8px);transform:translateY(-8px)}.hint--top-left:hover:after{-webkit-transform:translateX(-100%) translateY(-8px);-moz-transform:translateX(-100%) translateY(-8px);transform:translateX(-100%) translateY(-8px)}.hint--top-right:after,.hint--top-right:before{bottom:100%;left:50%}.hint--top-right:before{margin-bottom:-11px;left:calc(50% - 6px)}.hint--top-right:after{-webkit-transform:translateX(0);-moz-transform:translateX(0);transform:translateX(0);margin-left:-12px}.hint--top-right:hover:after,.hint--top-right:hover:before{-webkit-transform:translateY(-8px);-moz-transform:translateY(-8px);transform:translateY(-8px)}.hint--bottom-left:after,.hint--bottom-left:before{top:100%;left:50%}.hint--bottom-left:before{margin-top:-11px;left:calc(50% - 6px)}.hint--bottom-left:after{-webkit-transform:translateX(-100%);-moz-transform:translateX(-100%);transform:translateX(-100%);margin-left:12px}.hint--bottom-left:hover:before{-webkit-transform:translateY(8px);-moz-transform:translateY(8px);transform:translateY(8px)}.hint--bottom-left:hover:after{-webkit-transform:translateX(-100%) translateY(8px);-moz-transform:translateX(-100%) translateY(8px);transform:translateX(-100%) translateY(8px)}.hint--bottom-right:after,.hint--bottom-right:before{top:100%;left:50%}.hint--bottom-right:before{margin-top:-11px;left:calc(50% - 
6px)}.hint--bottom-right:after{-webkit-transform:translateX(0);-moz-transform:translateX(0);transform:translateX(0);margin-left:-12px}.hint--bottom-right:hover:after,.hint--bottom-right:hover:before{-webkit-transform:translateY(8px);-moz-transform:translateY(8px);transform:translateY(8px)}.hint--large:after,.hint--medium:after,.hint--small:after{white-space:normal;line-height:1.4em;word-wrap:break-word}.hint--small:after{width:80px}.hint--medium:after{width:150px}.hint--large:after{width:300px}.hint--error:after{background-color:#b34e4d;text-shadow:0 -1px 0 #592726}.hint--error.hint--top-left:before,.hint--error.hint--top-right:before,.hint--error.hint--top:before{border-top-color:#b34e4d}.hint--error.hint--bottom-left:before,.hint--error.hint--bottom-right:before,.hint--error.hint--bottom:before{border-bottom-color:#b34e4d}.hint--error.hint--left:before{border-left-color:#b34e4d}.hint--error.hint--right:before{border-right-color:#b34e4d}.hint--warning:after{background-color:#c09854;text-shadow:0 -1px 0 #6c5328}.hint--warning.hint--top-left:before,.hint--warning.hint--top-right:before,.hint--warning.hint--top:before{border-top-color:#c09854}.hint--warning.hint--bottom-left:before,.hint--warning.hint--bottom-right:before,.hint--warning.hint--bottom:before{border-bottom-color:#c09854}.hint--warning.hint--left:before{border-left-color:#c09854}.hint--warning.hint--right:before{border-right-color:#c09854}.hint--info:after{background-color:#3986ac;text-shadow:0 -1px 0 #1a3c4d}.hint--info.hint--top-left:before,.hint--info.hint--top-right:before,.hint--info.hint--top:before{border-top-color:#3986ac}.hint--info.hint--bottom-left:before,.hint--info.hint--bottom-right:before,.hint--info.hint--bottom:before{border-bottom-color:#3986ac}.hint--info.hint--left:before{border-left-color:#3986ac}.hint--info.hint--right:before{border-right-color:#3986ac}.hint--success:after{background-color:#458746;text-shadow:0 -1px 0 
#1a321a}.hint--success.hint--top-left:before,.hint--success.hint--top-right:before,.hint--success.hint--top:before{border-top-color:#458746}.hint--success.hint--bottom-left:before,.hint--success.hint--bottom-right:before,.hint--success.hint--bottom:before{border-bottom-color:#458746}.hint--success.hint--left:before{border-left-color:#458746}.hint--success.hint--right:before{border-right-color:#458746}.hint--always:after,.hint--always:before{opacity:1;visibility:visible}.hint--always.hint--top:before{-webkit-transform:translateY(-8px);-moz-transform:translateY(-8px);transform:translateY(-8px)}.hint--always.hint--top:after{-webkit-transform:translateX(-50%) translateY(-8px);-moz-transform:translateX(-50%) translateY(-8px);transform:translateX(-50%) translateY(-8px)}.hint--always.hint--top-left:before{-webkit-transform:translateY(-8px);-moz-transform:translateY(-8px);transform:translateY(-8px)}.hint--always.hint--top-left:after{-webkit-transform:translateX(-100%) translateY(-8px);-moz-transform:translateX(-100%) translateY(-8px);transform:translateX(-100%) translateY(-8px)}.hint--always.hint--top-right:after,.hint--always.hint--top-right:before{-webkit-transform:translateY(-8px);-moz-transform:translateY(-8px);transform:translateY(-8px)}.hint--always.hint--bottom:before{-webkit-transform:translateY(8px);-moz-transform:translateY(8px);transform:translateY(8px)}.hint--always.hint--bottom:after{-webkit-transform:translateX(-50%) translateY(8px);-moz-transform:translateX(-50%) translateY(8px);transform:translateX(-50%) translateY(8px)}.hint--always.hint--bottom-left:before{-webkit-transform:translateY(8px);-moz-transform:translateY(8px);transform:translateY(8px)}.hint--always.hint--bottom-left:after{-webkit-transform:translateX(-100%) translateY(8px);-moz-transform:translateX(-100%) translateY(8px);transform:translateX(-100%) 
translateY(8px)}.hint--always.hint--bottom-right:after,.hint--always.hint--bottom-right:before{-webkit-transform:translateY(8px);-moz-transform:translateY(8px);transform:translateY(8px)}.hint--always.hint--left:after,.hint--always.hint--left:before{-webkit-transform:translateX(-8px);-moz-transform:translateX(-8px);transform:translateX(-8px)}.hint--always.hint--right:after,.hint--always.hint--right:before{-webkit-transform:translateX(8px);-moz-transform:translateX(8px);transform:translateX(8px)}.hint--rounded:after{border-radius:4px}.hint--no-animate:after,.hint--no-animate:before{-webkit-transition-duration:0s;-moz-transition-duration:0s;transition-duration:0s}.hint--bounce:after,.hint--bounce:before{-webkit-transition:opacity .3s ease,visibility .3s ease,-webkit-transform .3s cubic-bezier(.71,1.7,.77,1.24);-moz-transition:opacity .3s ease,visibility .3s ease,-moz-transform .3s cubic-bezier(.71,1.7,.77,1.24);transition:opacity .3s ease,visibility .3s ease,transform .3s cubic-bezier(.71,1.7,.77,1.24)}.hint--no-shadow:after,.hint--no-shadow:before{text-shadow:initial;box-shadow:initial}.hint--no-arrow:before{display:none} diff --git a/linkis-web/src/apps/linkis/components/variable/index.scss b/linkis-web/src/apps/linkis/components/variable/index.scss index 643948e8b7..9140e16a35 100644 --- a/linkis-web/src/apps/linkis/components/variable/index.scss +++ b/linkis-web/src/apps/linkis/components/variable/index.scss @@ -50,6 +50,7 @@ line-height: 24px; display: flex; align-items: center; + position: relative; @media only screen and (max-width: 1480px){ .we-variable-content-label-group { display: flex !important; diff --git a/linkis-web/src/apps/linkis/components/variable/index.vue b/linkis-web/src/apps/linkis/components/variable/index.vue index 662a3fef27..4ec5ba2936 100644 --- a/linkis-web/src/apps/linkis/components/variable/index.vue +++ b/linkis-web/src/apps/linkis/components/variable/index.vue @@ -36,7 +36,7 @@ @@ -91,7 +92,7 @@ import mixin from 
'@/common/service/mixin'; import ErrorCodeForm from './EditForm/index' import {add, del, edit, getList} from "./service"; -import {formatDate} from "iview/src/components/date-picker/util"; +// import {formatDate} from "iview/src/components/date-picker/util"; export default { mixins: [mixin], components: {ErrorCodeForm}, @@ -151,7 +152,7 @@ export default { align: 'center', render: (h,params)=>{ return h('div', - formatDate(new Date(params.row.createTime),'yyyy-MM-dd hh:mm:ss') + new Date(params.row.createTime).toLocaleString() ) } }, @@ -163,7 +164,7 @@ export default { align: 'center', render: (h,params)=>{ return h('div', - formatDate(new Date(params.row.updateTime),'yyyy-MM-dd hh:mm:ss') + new Date(params.row.updateTime).toLocaleString() ) } }, @@ -210,22 +211,30 @@ export default { this.page.pageNow = value this.load() }, - onAdd(){ + async onAdd(){ + + this.modalEditData = {} + this.modalShow = true + await this.$nextTick() this.$refs.errorCodeForm.formModel.resetFields() + this.$refs.errorCodeForm.formModel.setValue({}) this.modalAddMode = 'add' - this.modalShow = true }, - onTableEdit(row){ + async onTableEdit(row){ + + this.modalEditData = row + this.modalShow = true + await this.$nextTick() + this.$refs.errorCodeForm.formModel.resetFields() this.$refs.errorCodeForm.formModel.setValue(row) this.modalAddMode = 'edit' - this.modalShow = true }, onTableDelete(row){ this.$Modal.confirm({ title: this.$t('message.linkis.basedataManagement.modal.modalTitle'), - content: this.$t('message.linkis.basedataManagement.modal.modalDelete'), - onOk: ()=>{ + content: this.$t('message.linkis.basedataManagement.modal.modalDelete', {name: row.name}), + onOk: async ()=>{ let params = { id: row.id } @@ -235,6 +244,7 @@ export default { duration: 3, content: this.$t('message.linkis.basedataManagement.modal.modalDeleteSuccess') }) + this.load() }else{ this.$Message.success({ duration: 3, @@ -242,22 +252,22 @@ export default { }) } }) - this.load() } }) }, onModalOk(){ - 
this.$refs.errorCodeForm.formModel.submit((formData)=>{ + this.$refs.errorCodeForm.formModel.submit(async (formData)=>{ this.modalLoading = true if(this.modalAddMode=='add') { - add(formData).then((data)=>{ + await add(formData).then((data)=>{ window.console.log(data) if(data.result) { this.$Message.success({ duration: 3, content: this.$t('message.linkis.basedataManagement.modal.modalAddSuccess') }) + this.load(); }else{ this.$Message.success({ duration: 3, @@ -266,7 +276,7 @@ export default { } }) }else { - edit(formData).then((data)=>{ + await edit(formData).then((data)=>{ window.console.log(data) if(data.result) { this.$Message.success({ diff --git a/linkis-web/src/apps/linkis/module/udfTree/service.js b/linkis-web/src/apps/linkis/module/udfTree/service.js index 2f1400afbc..f4d7ea2b7a 100644 --- a/linkis-web/src/apps/linkis/module/udfTree/service.js +++ b/linkis-web/src/apps/linkis/module/udfTree/service.js @@ -23,8 +23,8 @@ const getList = (params)=> { return api.fetch(udfTreeBaseUrl, params , 'get') } -const getAll = ()=> { - return api.fetch(udfTreeBaseUrl+"/all", 'get') +const getAll = (params)=> { + return api.fetch(udfTreeBaseUrl+"/all", params, 'get') } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/present/PresentWay.java b/linkis-web/src/apps/linkis/module/userConfig/index.js similarity index 82% rename from linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/present/PresentWay.java rename to linkis-web/src/apps/linkis/module/userConfig/index.js index a91dd3d475..000e8e120d 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/src/main/java/org/apache/linkis/cli/common/entity/present/PresentWay.java +++ b/linkis-web/src/apps/linkis/module/userConfig/index.js @@ -5,16 +5,19 @@ * The ASF licenses this file to You under the Apache License, Version 2.0 * (the 
"License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * + * + * http://www.apache.org/licenses/LICENSE-2.0 + * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ + -package org.apache.linkis.cli.common.entity.present; - -public interface PresentWay {} +export default { + name: 'userConfig', + events: [], + component: () => import('./index.vue'), +}; \ No newline at end of file diff --git a/linkis-web/src/apps/linkis/module/userConfig/index.scss b/linkis-web/src/apps/linkis/module/userConfig/index.scss new file mode 100644 index 0000000000..29be39de74 --- /dev/null +++ b/linkis-web/src/apps/linkis/module/userConfig/index.scss @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +@import '@/common/style/variables.scss'; +.search-bar { + .search-item { + display: flex; + justify-content: flex-start; + align-items: center; + font-size: $font-size-base; + .lable { + flex-basis: 120px; + text-align: center; + } + } + .search { + margin-left: 25px; + } + .ivu-input-wrapper { + width: calc(100% - 105px); + } +} +.table-content { + margin-top: 25px; + height: 687px; +} +.form { + padding: 40px 50px; +} \ No newline at end of file diff --git a/linkis-web/src/apps/linkis/module/userConfig/index.vue b/linkis-web/src/apps/linkis/module/userConfig/index.vue new file mode 100644 index 0000000000..43f5baf66a --- /dev/null +++ b/linkis-web/src/apps/linkis/module/userConfig/index.vue @@ -0,0 +1,522 @@ + + + + + + + diff --git a/linkis-web/src/apps/linkis/router.js b/linkis-web/src/apps/linkis/router.js index a94beab0c5..0f11eca5d3 100644 --- a/linkis-web/src/apps/linkis/router.js +++ b/linkis-web/src/apps/linkis/router.js @@ -188,6 +188,16 @@ export default [ publicPage: true, }, }, + { + name: 'acrossClusterRule', + path: 'acrossClusterRule', + component: () => + import('./module/acrossClusterRule/index.vue'), + meta: { + title: 'acrossClusterRule', + publicPage: true, + }, + }, { name: 'gatewayAuthToken', path: 'gatewayAuthToken', @@ -298,6 +308,16 @@ export default [ publicPage: true, }, }, + { + name: 'configManagement', + path: 'configManagement', + component: () => + import('./module/configManagement/index.vue'), + meta: { + title: 'configManagement', + publicPage: true, + }, + }, { name: 'codeDetail', path: 'codeDetail', @@ -319,6 +339,16 @@ export default [ noLayout: true }, }, + { + name: 'userConfig', + path: 'userConfig', + component: () => + import('./module/userConfig/index.vue'), + meta: { + title: 'userConfig', + publicPage: true, + }, + }, ], }, ] diff --git a/linkis-web/src/apps/linkis/view/linkis/index.vue b/linkis-web/src/apps/linkis/view/linkis/index.vue index 88e5088af5..091e01e97c 100644 --- 
a/linkis-web/src/apps/linkis/view/linkis/index.vue +++ b/linkis-web/src/apps/linkis/view/linkis/index.vue @@ -30,28 +30,28 @@
{{item.name}}
+ v-if="(item.key === '1-8' || item.key === '1-9' || item.key === '1-10' || item.key === '1-12') && !item.showSubMenu">
+ v-if="isLogAdmin ? true : item3.key === '1-8-1' || item3.key === '1-9-2' || item3.key === '1-9-1'">
@@ -76,7 +76,7 @@ item.key === index diff --git a/linkis-web/src/common/i18n/en.json b/linkis-web/src/common/i18n/en.json index bd0a869d14..aac078b18a 100644 --- a/linkis-web/src/common/i18n/en.json +++ b/linkis-web/src/common/i18n/en.json @@ -308,7 +308,8 @@ "lineFilter": "Line Filter", "resultGroupLineFilter": "Result Group Line Filter", "resultGroup": "Result Group", - "all": "all", + "all": "All Results({count})", + "autoFormat": "Auto Format", "downloadMode": "Download Mode :", "deepAnalysis": "Deep Analysis", "success": { diff --git a/linkis-web/src/common/i18n/zh.json b/linkis-web/src/common/i18n/zh.json index 9491068eeb..688153101e 100644 --- a/linkis-web/src/common/i18n/zh.json +++ b/linkis-web/src/common/i18n/zh.json @@ -314,7 +314,8 @@ "lineFilter": "列筛选", "resultGroupLineFilter": "对结果集的列进行筛选", "resultGroup": "结果集", - "all": "是否全量", + "all": "所有结果集({count}个)", + "autoFormat": "自动格式化", "downloadMode": "下载方式:", "deepAnalysis": "分 析 进 阶", "success": { diff --git a/linkis-web/src/components/consoleComponent/result.vue b/linkis-web/src/components/consoleComponent/result.vue index 8c901ad7e8..f1c9940e9f 100644 --- a/linkis-web/src/components/consoleComponent/result.vue +++ b/linkis-web/src/components/consoleComponent/result.vue @@ -23,6 +23,7 @@ ref="toolbar" v-show="result.path" :current-path="result.path" + :all-path="result.allPath" :show-filter="tableData.type !== 'normal'" :script="script" :row="hightLightRow" @@ -248,9 +249,11 @@ export default { total: 0, path: '', cache: {}, + allPath: [] }; if (this.script.resultList && this.script.resultSet !== undefined) { - res = this.script.resultList[this.script.resultSet].result + res = this.script.resultList[this.script.resultSet].result + res.allPath = this.script.resultList.map(result => result.path); } if(!res && this.script.result){ res = this.script.result diff --git a/linkis-web/src/components/consoleComponent/resultSetList.vue b/linkis-web/src/components/consoleComponent/resultSetList.vue index 
22f4e24e74..80c8a99d73 100644 --- a/linkis-web/src/components/consoleComponent/resultSetList.vue +++ b/linkis-web/src/components/consoleComponent/resultSetList.vue @@ -45,7 +45,7 @@ v-for="(item, index) in list" :class="{current: current-0 === index}" :data-index="index" - :key="item.path">{{$t('message.common.resultList')}}{{ index+1 }} + :key="item.path">{{$t('message.common.resultList')}}{{ +item.name.split('.')[0].substring(1) + 1 }}
@@ -73,7 +73,9 @@ export default { }, data() { return { - resultList: this.list, + resultList: this.list.sort((a, b) => { + return +(a.name.split('.')[0].substring(1)) - +(b.name.split('.')[0].substring(1)) + }), show: false, }; }, diff --git a/linkis-web/src/components/consoleComponent/toolbar.vue b/linkis-web/src/components/consoleComponent/toolbar.vue index ccbc4f738e..f169ab4358 100644 --- a/linkis-web/src/components/consoleComponent/toolbar.vue +++ b/linkis-web/src/components/consoleComponent/toolbar.vue @@ -148,8 +148,14 @@ {{$t('message.common.toolbar.downloadMode')}} - {{$t('message.common.toolbar.all')}} + {{$t('message.common.toolbar.all', {count: String(allPath.length)})}} +
+ + {{$t('message.common.toolbar.autoFormat')}} + +
+ @@ -224,6 +230,10 @@ export default { type: String, default: '', }, + allPath: { + type: Array, + default: () => [] + }, showFilter: { type: Boolean, default: false @@ -267,6 +277,7 @@ export default { isIconLabelShow: true, iconSize: 14, allDownload: false, // whether to download all result sets(是否下载全部结果集) + autoFormat: false, // whether to format result sets resultsShowType: '2', separators: [ { key: ',', label: this.$t('message.common.separator.comma'), value: '1', span: 4, offset: 0}, @@ -290,7 +301,10 @@ export default { return describe }, isAll() { - return ['hql', 'sql'].includes(this.script.runType) && this.download.format === '2'; + return ['hql', 'sql'].includes(this.script.runType) || this.download.format === '1'; + }, + isExcel() { + return this.download.format === '2'; }, rsDownload() { return storage.get('resultSetExportEnable'); @@ -338,6 +352,19 @@ export default { const resultType = this.resultsShowType === '1' ? 'visual' : 'dataWrangler'; this.$emit('on-analysis', resultType); }, + downloadFromHref(url) { + const link = document.createElement('a'); + link.setAttribute('href', url); + link.setAttribute('download', ''); + const evObj = document.createEvent('MouseEvents'); + evObj.initMouseEvent('click', true, true, window, 0, 0, 0, 0, 0, false, false, true, false, 0, null); + return link.dispatchEvent(evObj) + }, + pause(msec = 1000) { + return new Promise((resolve) => { + setTimeout(resolve, msec); + }) + }, async downloadConfirm() { const splitor = this.download.format === '1' ? 'csv' : 'xlsx'; const charset = this.download.coding === '1' ? 
'utf-8' : 'gbk'; @@ -355,11 +382,11 @@ export default { let temPath = this.currentPath; // The result set path of the api execution download is different(api执行下载的结果集路径不一样) let apiPath = `${this.getResultUrl}/resultsetToExcel`; - if (this.isAll && this.allDownload) { + if (this.isAll && this.allDownload && this.isExcel) { temPath = temPath.substring(0, temPath.lastIndexOf('/')); apiPath = `${this.getResultUrl}/resultsetsToExcel` } - let url = `http://${window.location.host}/api/rest_j/v1/` + apiPath +'?path=' + temPath + '&charset=' + charset + '&outputFileType=' + splitor + '&nullValue=' + nullValue + '&outputFileName=' + filename; + let url = `http://${window.location.host}/api/rest_j/v1/` + apiPath + '?charset=' + charset + '&outputFileType=' + splitor + '&nullValue=' + nullValue; // If the api execution page gets the result set, you need to bring the taskId(如果是api执行页获取结果集,需要带上taskId) if(this.getResultUrl !== 'filesystem') { url += `&taskId=${this.comData.taskID}` @@ -369,16 +396,43 @@ export default { let separator = encodeURIComponent(separatorItem.key || ''); url += `&csvSeparator=${separator}` } + if(this.isAll && this.isExcel) { + url += `&autoFormat=${this.autoFormat}` + } // Before downloading, use the heartbeat interface to confirm whether to log in(下载之前条用心跳接口确认是否登录) await api.fetch('/user/heartbeat', 'get'); - const link = document.createElement('a'); - link.setAttribute('href', url); - link.setAttribute('download', ''); - const evObj = document.createEvent('MouseEvents'); - evObj.initMouseEvent('click', true, true, window, 0, 0, 0, 0, 0, false, false, true, false, 0, null); - const flag = link.dispatchEvent(evObj); + const eventList = []; + let flag = null; + if (this.isAll && !this.isExcel && this.allDownload) { + let count = 0 + for(let path of this.allPath) { + let temUrl = url; + temUrl += `&path=${path}` + const name = `ResultSet${Number(path.substring(temPath.lastIndexOf('/')).split('.')[0].split('_')[1]) + 1}` + temUrl += '&outputFileName=' + 
name + const event = this.downloadFromHref(temUrl) + eventList.push(event); + if(++count >= 10) { + await this.pause(1000); + count = 0; + } + } + // this.allPath.forEach(path => { + // let temUrl = url; + // temUrl += `&path=${path}` + // const name = `ResultSet${Number(path.substring(temPath.lastIndexOf('/')).split('.')[0].split('_')[1]) + 1}` + // temUrl += '&outputFileName=' + name + // const event = this.downloadFromHref(temUrl) + // eventList.push(event); + // }); + } else { + url += `&path=${temPath}` + '&outputFileName=' + filename + flag = this.downloadFromHref(url); + } this.$nextTick(() => { - if (flag) { + if (flag && this.isExcel) { + this.$Message.success(this.$t('message.common.toolbar.success.download')); + } else if (!this.isExcel && !eventList.includes(false)) { this.$Message.success(this.$t('message.common.toolbar.success.download')); } }); diff --git a/linkis-web/src/components/table/table.css b/linkis-web/src/components/table/table.css index 423c5b3108..3cd1242847 100644 --- a/linkis-web/src/components/table/table.css +++ b/linkis-web/src/components/table/table.css @@ -142,16 +142,29 @@ overflow: auto; } .bottom-td { - overflow: hidden; - white-space: nowrap; box-sizing: border-box; text-align: center; - text-overflow: ellipsis; vertical-align: middle; border-bottom: 1px solid #e8eaec; border-right: 1px solid #e8eaec; transition: background-color 0.2s ease-in-out; } +.scroll { + white-space: nowrap; + text-overflow: ellipsis; + overflow: hidden; +} +.scroll::-webkit-scrollbar { + display: none; +} +.scroll::-webkit-scrollbar-thumb { + background: transparent +} +.wrap { + white-space: pre-line; + word-break: break-all; +} + .trselect td { background-color: #ebf7ff; } diff --git a/linkis-web/src/components/table/table.vue b/linkis-web/src/components/table/table.vue index aa13f59ac3..e3ca03d90f 100644 --- a/linkis-web/src/components/table/table.vue +++ b/linkis-web/src/components/table/table.vue @@ -71,44 +71,49 @@ 
style="width:100%;table-layout:fixed;" :style="{height:`${loadedNum*tdHeight}px`}" > - - + - {{indexs+dataTop/tdHeight+1}} - - - + v-if="columns[0].type=='select'" + > + + +
暂无数据 @@ -324,6 +329,13 @@ export default { } } }, + hasLineBreak(content) { + if (content.split(/\n/).length > 1) { + return true + } + return false + }, + //scroll bar scroll down(滚动条向下滚动) handleScrollBottom() { if (this.dataTop > this.scrollTop) { diff --git a/linkis-web/src/components/virtualTable/historyTable/historyTable.vue b/linkis-web/src/components/virtualTable/historyTable/historyTable.vue index 420d6dcede..263907d1af 100644 --- a/linkis-web/src/components/virtualTable/historyTable/historyTable.vue +++ b/linkis-web/src/components/virtualTable/historyTable/historyTable.vue @@ -52,21 +52,22 @@ :key="th.key" :style="{'text-align': th.align}" class="we-table-row-cell"> -
- - {{ td[th.key] }} -
+ +
+ + {{ td[th.key] }} +
+
diff --git a/linkis-web/src/dss/module/resourceSimple/engine.vue b/linkis-web/src/dss/module/resourceSimple/engine.vue index b603f10df4..fea0479b0f 100644 --- a/linkis-web/src/dss/module/resourceSimple/engine.vue +++ b/linkis-web/src/dss/module/resourceSimple/engine.vue @@ -21,135 +21,138 @@ v-if="loading" size="large" fix/> -
-
-

{{ $t('message.common.resourceSimple.YS') }}

-
- {{ $t('message.common.resourceSimple.FL') }} - -
-
+
- {{ calssifyName(item) }} -
    - -
-
-
-
-
-

{{ $t('message.common.resourceSimple.ZH') }}

-
- {{ $t('message.common.resourceSimple.FL') }} - + class="engine-content" + v-if="ideEngineList.length > 0"> +
+

{{ $t('message.common.resourceSimple.YS') }}

+
+ {{ $t('message.common.resourceSimple.FL') }} + +
+
+
+ {{ calssifyName(item) }} +
    + +
- {{ calssifyName(item) }} -
    - -
-
-
-
-
-

Other

-
- {{ $t('message.common.resourceSimple.FL') }} - + class="engine-content" + v-if="boardEngineList.length > 0"> +
+

{{ $t('message.common.resourceSimple.ZH') }}

+
+ {{ $t('message.common.resourceSimple.FL') }} + +
+
+
+ {{ calssifyName(item) }} +
    + +
- {{ calssifyName(item) }} -
    - -
+ class="engine-content" + v-if="otherEngineList.length > 0"> +
+

Other

+
+ {{ $t('message.common.resourceSimple.FL') }} + +
+
+
+ {{ calssifyName(item) }} +
    + +
+
+ {{ $t('message.common.resourceSimple.ZWSJ') }} diff --git a/linkis-web/src/dss/module/resourceSimple/index.scss b/linkis-web/src/dss/module/resourceSimple/index.scss index 783e44f840..71478c8a33 100644 --- a/linkis-web/src/dss/module/resourceSimple/index.scss +++ b/linkis-web/src/dss/module/resourceSimple/index.scss @@ -47,9 +47,20 @@ } } } + .queue-app { + display: grid; + grid-template-columns: 1fr 1fr; + grid-gap: 10px; + .queue-app-item { + text-align: left; + } + .pl60 { + padding-left: 20px; + } + } .queue-manager-top { .queue-manager-top-content { - height: 180px; + height: 134px; overflow-y: auto; } .queue-manager-item { @@ -296,9 +307,9 @@ } } // engine manager style(引擎管理器样式) + .engine-box { height: 100%; - overflow-y: auto; position: $relative; .no-data { font-size: $font-size-base; @@ -325,114 +336,120 @@ background: $tooltip-color ; } } -.engine-content { - padding: 0 15px; - .engine-header-bar { - display: flex; - justify-content: space-between; - align-items: center; - margin-top: 10px; - margin-bottom: 10px; - .data-type-title { - font-size: 18px; - font-weight: bold; - } - .classify { - flex-basis: 30%; +.engine-content-wrapper { + + overflow-y: auto; + height: 100%; + .engine-content { + padding: 0 15px; + .engine-header-bar { display: flex; - justify-content: space-around; + justify-content: space-between; align-items: center; - >span { - margin-right: 5px; - width: 40px; - font-size: $font-size-base; + margin-top: 10px; + margin-bottom: 10px; + .data-type-title { + font-size: 18px; + font-weight: bold; + } + .classify { + flex-basis: 30%; + display: flex; + justify-content: space-around; + align-items: center; + >span { + margin-right: 5px; + width: 40px; + font-size: $font-size-base; + } } } - } - .engine-list { - display: flex; - align-items: center; - padding-left: 30px; - .engline-name { - flex-basis: 62px; - font-size: 16px; - margin-right: 20px; - } - .engine-ul { - flex: 1; + .engine-list { display: flex; - justify-content: flex-start; 
align-items: center; - flex-wrap: wrap; - .engine-li { - border: $border-width-base $border-style-base $border-color-base; - border-radius: $border-radius-base; - padding: 5px; - position: $relative; - width: 60px; - height: 60px; - margin: 8px 20px 8px 0; - box-sizing: border-box; + padding-left: 30px; + .engline-name { + flex-basis: 62px; + font-size: 16px; + margin-right: 20px; + } + .engine-ul { + flex: 1; display: flex; - justify-content: center; + justify-content: flex-start; align-items: center; - &:hover { - background-color: $background-color-base; + flex-wrap: wrap; + .engine-li { + border: $border-width-base $border-style-base $border-color-base; border-radius: $border-radius-base; - cursor: pointer; - } - &.active.yellow { - border: $border-width-base $border-style-base $yellow-color; - background-color: $background-color-base; - } - &.active.green { - border: $border-width-base $border-style-base $success-color; - background-color: $background-color-base; - } - &.active.blue { - border: $border-width-base $border-style-base $primary-color; - background-color: $background-color-base; - } - .engine-icon { - &::before { - font-size: 40px; + padding: 5px; + position: $relative; + width: 60px; + height: 60px; + margin: 8px 20px 8px 0; + box-sizing: border-box; + display: flex; + justify-content: center; + align-items: center; + &:hover { + background-color: $background-color-base; + border-radius: $border-radius-base; + cursor: pointer; } - } - .engine-icon.yellow { - &::before { - color: $yellow-color; + &.active.yellow { + border: $border-width-base $border-style-base $yellow-color; + background-color: $background-color-base; } - } - .engine-icon.green { - &::before { - color: $success-color; + &.active.green { + border: $border-width-base $border-style-base $success-color; + background-color: $background-color-base; } - } - .engine-icon.blue { - &::before { - color: $primary-color; + &.active.blue { + border: $border-width-base $border-style-base 
$primary-color; + background-color: $background-color-base; } - } - .engine-right { - position: $absolute; - top: 0; - left: 0; - border-radius: 0 0 5px 0; - font-weight: bold; - &:before { - color: $background-color-base; + .engine-icon { + &::before { + font-size: 40px; + } + } + .engine-icon.yellow { + &::before { + color: $yellow-color; + } } - &.yellow { - background-color: $yellow-color; + .engine-icon.green { + &::before { + color: $success-color; + } } - &.green { - background-color: $success-color; + .engine-icon.blue { + &::before { + color: $primary-color; + } } - &.blue { - background-color: $primary-color; + .engine-right { + position: $absolute; + top: 0; + left: 0; + border-radius: 0 0 5px 0; + font-weight: bold; + &:before { + color: $background-color-base; + } + &.yellow { + background-color: $yellow-color; + } + &.green { + background-color: $success-color; + } + &.blue { + background-color: $primary-color; + } } } } } } } + diff --git a/linkis-web/src/dss/module/resourceSimple/queue.vue b/linkis-web/src/dss/module/resourceSimple/queue.vue index cd37f63d46..8d510e2822 100644 --- a/linkis-web/src/dss/module/resourceSimple/queue.vue +++ b/linkis-web/src/dss/module/resourceSimple/queue.vue @@ -55,9 +55,21 @@ suffixe="GB" width="120px" height="120px" - :title="$t('message.common.resourceSimple.NC')"> + :title="$t('message.common.resourceSimple.NC')" + class="queue-manager-circle"> +
+
+
Num Pending Applications: {{infos.queueInfo.numPendingApps}}
+
l
{{ $t('message.common.resourceSimple.ZYSYPHB') }}
- 1.3.2-SNAPSHOT + 1.1.14-wedatasphere 2.9.2 2.4.3 2.7.2 @@ -146,6 +146,7 @@ 1.21 1.10 1.3 + 1.5 3.16.3 3.3.0 9.3 @@ -949,6 +950,14 @@ test + + + org.instancio + instancio-junit + 2.16.1 + test + + org.mockito mockito-core @@ -1407,9 +1416,9 @@ org.apache.maven.plugins maven-surefire-plugin - --add-opens java.base/java.util=ALL-UNNAMED - --add-opens java.base/java.nio=ALL-UNNAMED - --add-opens java.base/java.lang=ALL-UNNAMED + + + diff --git a/tool/dependencies/known-dependencies.txt b/tool/dependencies/known-dependencies.txt index 62f2e364a2..06ea1b3362 100644 --- a/tool/dependencies/known-dependencies.txt +++ b/tool/dependencies/known-dependencies.txt @@ -32,6 +32,7 @@ avatica-1.8.0.jar avatica-metrics-1.8.0.jar avro-1.7.4.jar avro-1.7.7.jar +avro-1.11.0.jar bcpkix-jdk15on-1.64.jar bcprov-jdk15on-1.64.jar bonecp-0.8.0.RELEASE.jar @@ -64,7 +65,7 @@ commons-configuration-1.10.jar commons-daemon-1.0.13.jar commons-dbcp-1.4.jar commons-exec-1.3.jar -commons-fileupload-1.4.jar +commons-fileupload-1.5.jar commons-httpclient-3.1.jar commons-io-2.11.0.jar commons-jxpath-1.3.jar @@ -88,6 +89,7 @@ datanucleus-api-jdo-4.2.4.jar datanucleus-core-4.1.17.jar datanucleus-rdbms-4.1.19.jar derby-10.14.2.0.jar +disruptor-3.4.0.jar disruptor-3.3.0.jar dropwizard-metrics-hadoop-metrics2-reporter-0.1.2.jar druid-1.1.22.jar @@ -523,6 +525,7 @@ spring-jdbc-5.2.22.RELEASE.jar spring-plugin-core-2.0.0.RELEASE.jar spring-plugin-metadata-2.0.0.RELEASE.jar spring-security-crypto-5.3.9.RELEASE.jar +spring-security-crypto-5.7.5.jar spring-security-rsa-1.0.9.RELEASE.jar spring-tx-5.2.22.RELEASE.jar spring-web-5.2.22.RELEASE.jar