diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/BDPConfiguration.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/BDPConfiguration.scala index 14febab63a..9bfa053b77 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/BDPConfiguration.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/conf/BDPConfiguration.scala @@ -232,19 +232,20 @@ private[conf] object BDPConfiguration extends Logging { private[common] def formatValue[T](defaultValue: T, value: Option[String]): Option[T] = { if (value.isEmpty || value.exists(StringUtils.isEmpty)) return Option(defaultValue) + val trimValue = value.map(_.trim) val formattedValue = defaultValue match { - case _: String => value - case _: Byte => value.map(_.toByte) - case _: Short => value.map(_.toShort) - case _: Char => value.map(_.toCharArray.apply(0)) - case _: Int => value.map(_.toInt) - case _: Long => value.map(_.toLong) - case _: Float => value.map(_.toFloat) - case _: Double => value.map(_.toDouble) - case _: Boolean => value.map(_.toBoolean) - case _: TimeType => value.map(new TimeType(_)) - case _: ByteType => value.map(new ByteType(_)) - case null => value + case _: String => trimValue + case _: Byte => trimValue.map(_.toByte) + case _: Short => trimValue.map(_.toShort) + case _: Char => trimValue.map(_.toCharArray.apply(0)) + case _: Int => trimValue.map(_.toInt) + case _: Long => trimValue.map(_.toLong) + case _: Float => trimValue.map(_.toFloat) + case _: Double => trimValue.map(_.toDouble) + case _: Boolean => trimValue.map(_.toBoolean) + case _: TimeType => trimValue.map(new TimeType(_)) + case _: ByteType => trimValue.map(new ByteType(_)) + case null => trimValue } formattedValue.asInstanceOf[Option[T]] } diff --git a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/log/LogUtils.scala b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/log/LogUtils.scala index 77c82f3883..e558e765be 100644 --- a/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/log/LogUtils.scala +++ b/linkis-commons/linkis-common/src/main/scala/org/apache/linkis/common/log/LogUtils.scala @@ -33,7 +33,7 @@ object LogUtils { } def generateERROR(rawLog: String): String = { - getTimeFormat + " " + "ERROR" + " " + rawLog + getTimeFormat + " " + ERROR_STR + " " + rawLog } def generateWarn(rawLog: String): String = { @@ -52,4 +52,6 @@ object LogUtils { getTimeFormat + " " + "SYSTEM-WARN" + " " + rawLog } + val ERROR_STR = "ERROR" + } diff --git a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/TaskUtils.scala b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/TaskUtils.scala index 9b2be16ef7..3affc351d9 100644 --- a/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/TaskUtils.scala +++ b/linkis-commons/linkis-protocol/src/main/scala/org/apache/linkis/protocol/utils/TaskUtils.scala @@ -45,6 +45,14 @@ object TaskUtils { } } else params.put(key, waitToAdd) + private def clearMap(params: util.Map[String, AnyRef], key: String): Unit = + if (params != null && params.containsKey(key)) { + params.get(key) match { + case map: util.Map[String, AnyRef] => map.clear() + case _ => params.put(key, new util.HashMap[String, AnyRef]()) + } + } + private def getConfigurationMap( params: util.Map[String, AnyRef], key: String @@ -84,13 +92,20 @@ object TaskUtils { def addStartupMap(params: util.Map[String, AnyRef], 
startupMap: util.Map[String, AnyRef]): Unit = addConfigurationMap(params, startupMap, TaskConstant.PARAMS_CONFIGURATION_STARTUP) + def clearStartupMap(params: util.Map[String, AnyRef]): Unit = { + val configurationMap = getMap(params, TaskConstant.PARAMS_CONFIGURATION) + if (!configurationMap.isEmpty) { + clearMap(configurationMap, TaskConstant.PARAMS_CONFIGURATION_STARTUP) + } + } + def addRuntimeMap(params: util.Map[String, AnyRef], runtimeMap: util.Map[String, AnyRef]): Unit = addConfigurationMap(params, runtimeMap, TaskConstant.PARAMS_CONFIGURATION_RUNTIME) def addSpecialMap(params: util.Map[String, AnyRef], specialMap: util.Map[String, AnyRef]): Unit = addConfigurationMap(params, specialMap, TaskConstant.PARAMS_CONFIGURATION_SPECIAL) - // tdoo + // todo def getLabelsMap(params: util.Map[String, AnyRef]): util.Map[String, AnyRef] = getMap(params, TaskConstant.LABELS) diff --git a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/FileSystemUtils.java b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/FileSystemUtils.java index 0f93cdb6ab..2809c83eec 100644 --- a/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/FileSystemUtils.java +++ b/linkis-commons/linkis-storage/src/main/java/org/apache/linkis/storage/utils/FileSystemUtils.java @@ -96,6 +96,30 @@ public static void createNewFileWithFileSystem( } } + /** + * create new file and set file owner by FileSystem + * + * @param fileSystem + * @param filePath + * @param user + * @param createParentWhenNotExists + */ + public static void createNewFileAndSetOwnerWithFileSystem( + FileSystem fileSystem, FsPath filePath, String user, boolean createParentWhenNotExists) + throws Exception { + if (!fileSystem.exists(filePath)) { + if (!fileSystem.exists(filePath.getParent())) { + if (!createParentWhenNotExists) { + throw new IOException( + "parent dir " + filePath.getParent().getPath() + " does not exist."); + } + mkdirs(fileSystem, filePath.getParent(), user); + } + fileSystem.createNewFile(filePath); + fileSystem.setOwner(filePath, user); + } + } + /** * Recursively create a directory * @@ -133,4 +157,39 @@ public static boolean mkdirs(FileSystem fileSystem, FsPath dest, String user) th } return true; } + + /** + * Recursively create a directory and set owner info + * + * @param fileSystem + * @param dest + * @param user + * @throws IOException + * @return + */ + public static boolean mkdirsAndSetOwner(FileSystem fileSystem, FsPath dest, String user) + throws IOException { + FsPath parentPath = dest.getParent(); + Stack<FsPath> dirsToMake = new Stack<>(); + dirsToMake.push(dest); + while (!fileSystem.exists(parentPath)) { + dirsToMake.push(parentPath); + + if (Objects.isNull(parentPath.getParent())) { + // parent path of root is null + break; + } + + parentPath = parentPath.getParent(); + } + if (!fileSystem.canExecute(parentPath)) { + throw new IOException("You do not have permission to access path " + dest.getPath()); + } + while (!dirsToMake.empty()) { + FsPath path = dirsToMake.pop(); + fileSystem.mkdir(path); + fileSystem.setOwner(path, user); + } + return true; + } } diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDescBuilder.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDescBuilder.java index 0c8a3db539..2b0b20188a 100644 ---
a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDescBuilder.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/interactor/job/interactive/InteractiveJobDescBuilder.java @@ -26,6 +26,7 @@ import org.apache.linkis.cli.application.operator.ujes.LinkisJobOper; import org.apache.linkis.cli.application.operator.ujes.UJESClientFactory; import org.apache.linkis.cli.application.utils.CliUtils; +import org.apache.linkis.cli.application.utils.LoggerManager; import org.apache.commons.lang3.StringUtils; @@ -135,7 +136,12 @@ public static InteractiveJobDesc build(CliCtx ctx) { } if (StringUtils.isBlank(code) && StringUtils.isNotBlank(codePath)) { - code = CliUtils.readFile(codePath); + try { + code = CliUtils.readFile(codePath); + } catch (Exception e) { + LoggerManager.getInformationLogger().error("Failed to read file", e); + throw e; + } } executionMap.put(LinkisKeys.KEY_CODE, code); @@ -143,6 +149,9 @@ public static InteractiveJobDesc build(CliCtx ctx) { labelMap.put(LinkisKeys.KEY_CODETYPE, runType); labelMap.put(LinkisKeys.KEY_USER_CREATOR, proxyUsr + "-" + creator); sourceMap.put(LinkisKeys.KEY_SCRIPT_PATH, scriptPath); + if (ctx.getExtraMap().containsKey(CliKeys.VERSION)) { + sourceMap.put(LinkisKeys.CLI_VERSION, ctx.getExtraMap().get(CliKeys.VERSION)); + } runtimeMap.put(LinkisKeys.KEY_HIVE_RESULT_DISPLAY_TBALE, true); desc.setCreator(creator); diff --git a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/file/ResultFileWriter.java b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/file/ResultFileWriter.java index 9a54699165..c2d47e2b7a 100644 --- a/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/file/ResultFileWriter.java +++ b/linkis-computation-governance/linkis-client/linkis-cli/src/main/java/org/apache/linkis/cli/application/present/file/ResultFileWriter.java @@ -32,7 +32,6 @@ public static void writeToFile( String pathName, String fileName, String content, Boolean overWrite) { File dir = new File(pathName); - File file = new File(fileName); if (!dir.exists()) { try { @@ -47,6 +46,8 @@ public static void writeToFile( } } + File file = new File(dir.getAbsolutePath() + File.separator + fileName); + if (overWrite || !file.exists()) { try { file.createNewFile(); diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/entity/TemplateConfKey.java b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/entity/TemplateConfKey.java new file mode 100644 index 0000000000..13cbac5577 --- /dev/null +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/entity/TemplateConfKey.java @@ -0,0 +1,79 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.governance.common.entity; + +public class TemplateConfKey { + + private String templateUuid; + + private String key; + + private String templateName; + + private String configValue; + + public String getTemplateUuid() { + return templateUuid; + } + + public void setTemplateUuid(String templateUuid) { + this.templateUuid = templateUuid; + } + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public String getTemplateName() { + return templateName; + } + + public void setTemplateName(String templateName) { + this.templateName = templateName; + } + + public String getConfigValue() { + return configValue; + } + + public void setConfigValue(String configValue) { + this.configValue = configValue; + } + + @Override + public String toString() { + return "TemplateConfKey{" + + "templateUuid='" + + templateUuid + + '\'' + + ", key='" + + key + + '\'' + + ", templateName='" + + templateName + + '\'' + + ", configValue='" + + configValue + + '\'' + + '}'; + } +} diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfRequest.java b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfRequest.java new file mode 100644 index 0000000000..e8b566cda1 --- /dev/null +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfRequest.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +package org.apache.linkis.governance.common.protocol.conf; + +import org.apache.linkis.protocol.message.RequestProtocol; + +public class TemplateConfRequest implements RequestProtocol { + + private String templateUuid; + + private String templateName; + + public TemplateConfRequest(String templateUuid, String templateName) { + this.templateUuid = templateUuid; + this.templateName = templateName; + } + + public TemplateConfRequest(String templateUuid) { + this.templateUuid = templateUuid; + } + + public String getTemplateUuid() { + return templateUuid; + } + + public void setTemplateUuid(String templateUuid) { + this.templateUuid = templateUuid; + } + + public String getTemplateName() { + return templateName; + } + + public void setTemplateName(String templateName) { + this.templateName = templateName; + } +} diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfResponse.java b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfResponse.java new file mode 100644 index 0000000000..8822fe988d --- /dev/null +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/org/apache/linkis/governance/common/protocol/conf/TemplateConfResponse.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.governance.common.protocol.conf; + +import org.apache.linkis.governance.common.entity.TemplateConfKey; + +import java.util.ArrayList; +import java.util.List; + +public class TemplateConfResponse { + + private List<TemplateConfKey> list = new ArrayList<>(); + + public List<TemplateConfKey> getList() { + return list; + } + + public void setList(List<TemplateConfKey> list) { + this.list = list; + } +} diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConf.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConf.scala index a4671eaa17..b8b156173b 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConf.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConf.scala @@ -90,10 +90,4 @@ object GovernanceCommonConf { val EC_APP_MANAGE_MODE = CommonVars("linkis.ec.app.manage.mode", "attach") - val SCALA_PARSE_APPEND_CODE_ENABLED = - CommonVars("linkis.scala.parse.append.code.enable", true).getValue - - val SCALA_PARSE_APPEND_CODE = - CommonVars("linkis.scala.parse.append.code", "val linkisVar=1").getValue - } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/exception/GovernanceErrorException.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/exception/GovernanceErrorException.scala index 544dfcdab6..ec7bb9e80a 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/exception/GovernanceErrorException.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/exception/GovernanceErrorException.scala @@ -17,7 +17,7 @@ package org.apache.linkis.governance.common.exception -import org.apache.linkis.common.exception.{ErrorException, ExceptionLevel, LinkisRuntimeException} +import org.apache.linkis.common.exception.{ExceptionLevel, LinkisRuntimeException} class GovernanceErrorException(errorCode: Int, errorMsg: String) extends LinkisRuntimeException(errorCode, errorMsg) { diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/paser/CodeParser.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/paser/CodeParser.scala index 87576d5e48..64ece62fd7 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/paser/CodeParser.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/org/apache/linkis/governance/common/paser/CodeParser.scala @@ -86,6 +86,11 @@ abstract class CombinedEngineCodeParser extends CodeParser { } +/** + * Scala no longer uses this parser; EmptyParser is used instead.
If there is a comment at the end, + * it will cause the task to become stuck + */ +@deprecated class ScalaCodeParser extends SingleCodeParser with Logging { override val codeType: CodeType = CodeType.Scala @@ -109,11 +114,9 @@ class ScalaCodeParser extends SingleCodeParser with Logging { case _ => } if (statementBuffer.nonEmpty) codeBuffer.append(statementBuffer.mkString("\n")) - - // Append code `val linkisVar=1` in ends to prevent bugs that do not exit tasks for a long time - if (GovernanceCommonConf.SCALA_PARSE_APPEND_CODE_ENABLED) { - codeBuffer.append(GovernanceCommonConf.SCALA_PARSE_APPEND_CODE) - } + // Make sure the last line is not a comment + codeBuffer.append("\n") + codeBuffer.append("val linkisVar=123") codeBuffer.toArray } diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/paser/ScalaCodeParserTest.scala b/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/paser/ScalaCodeParserTest.scala index db7045baec..04adf3446c 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/paser/ScalaCodeParserTest.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/paser/ScalaCodeParserTest.scala @@ -29,7 +29,8 @@ class ScalaCodeParserTest { "val codeBuffer = new ArrayBuffer[String]()\n val statementBuffer = new ArrayBuffer[String]()" val scalaCodeParser = new ScalaCodeParser val array = scalaCodeParser.parse(scalaCode) - Assertions.assertTrue(array.length == 2) + Assertions.assertTrue(array.size == 3) + } @Test @@ -40,7 +41,7 @@ class ScalaCodeParserTest { " def addInt( a:Int, b:Int )\n var sum:Int = 0\n sum = a + b\n return sum\n }" val scalaCodeParser = new ScalaCodeParser val array = scalaCodeParser.parse(abnormalCode) - Assertions.assertTrue(array.length == 2) + Assertions.assertTrue(array.length == 3) } @@ -53,7 +54,7 @@ class ScalaCodeParserTest { val scalaCodeParser = new ScalaCodeParser val array = scalaCodeParser.parse(importCode) - Assertions.assertTrue(array.length == 3) + Assertions.assertTrue(array.length == 4) } @@ -67,7 +68,7 @@ class ScalaCodeParserTest { val scalaCodeParser = new ScalaCodeParser val arrayResult1 = scalaCodeParser.parse(specialCodeExp1) - Assertions.assertTrue(arrayResult1.length == 3) + Assertions.assertTrue(arrayResult1.length == 4) val specialCodeExp2 = " @BeanProperty\n var id: Long = _\n @BeanProperty\n var status: Int = 0\n " + @@ -78,7 +79,7 @@ class ScalaCodeParserTest { ".append(data, that.data)\n .isEquals\n }" val arrayResult2 = scalaCodeParser.parse(specialCodeExp2) - Assertions.assertTrue(arrayResult2.length == 2) + Assertions.assertTrue(arrayResult2.length == 3) } diff --git a/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml b/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml index 6d26ae863a..85050d4d21 100644 --- a/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml +++ b/linkis-dist/helm/charts/linkis/templates/configmap-init-sql.yaml @@ -26,6 +26,8 @@ data: DROP TABLE IF EXISTS `linkis_ps_configuration_key_engine_relation`; DROP TABLE IF EXISTS `linkis_ps_configuration_config_value`; DROP TABLE IF EXISTS `linkis_ps_configuration_category`; + DROP TABLE IF EXISTS `linkis_ps_configuration_key_limit_for_user`; + DROP TABLE IF EXISTS `linkis_ps_configutation_lm_across_cluster_rule`; DROP TABLE IF EXISTS 
`linkis_ps_job_history_group_history`; DROP TABLE IF EXISTS `linkis_ps_job_history_detail`; DROP TABLE IF EXISTS `linkis_ps_common_lock`; @@ -84,19 +86,23 @@ data: DROP TABLE IF EXISTS `linkis_mg_gateway_auth_token`; {{- end }} - CREATE TABLE IF NOT EXISTS `linkis_ps_configuration_config_key`( - `id` bigint(20) NOT NULL AUTO_INCREMENT, - `key` varchar(50) DEFAULT NULL COMMENT 'Set key, e.g. spark.executor.instances', - `description` varchar(200) DEFAULT NULL, - `name` varchar(50) DEFAULT NULL, - `default_value` varchar(200) DEFAULT NULL COMMENT 'Adopted when user does not set key', - `validate_type` varchar(50) DEFAULT NULL COMMENT 'Validate type, one of the following: None, NumInterval, FloatInterval, Include, Regex, OPF, Custom Rules', - `validate_range` varchar(50) DEFAULT NULL COMMENT 'Validate range', - `engine_conn_type` varchar(50) DEFAULT NULL COMMENT 'engine type,such as spark,hive etc', - `is_hidden` tinyint(1) DEFAULT NULL COMMENT 'Whether it is hidden from user. If set to 1(true), then user cannot modify, however, it could still be used in back-end', - `is_advanced` tinyint(1) DEFAULT NULL COMMENT 'Whether it is an advanced parameter. If set to 1(true), parameters would be displayed only when user choose to do so', - `level` tinyint(1) DEFAULT NULL COMMENT 'Basis for displaying sorting in the front-end. Higher the level is, higher the rank the parameter gets', - `treeName` varchar(20) DEFAULT NULL COMMENT 'Reserved field, representing the subdirectory of engineType', + CREATE TABLE `linkis_ps_configuration_config_key`( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `key` varchar(50) DEFAULT NULL COMMENT 'Set key, e.g. spark.executor.instances', + `description` varchar(200) DEFAULT NULL, + `name` varchar(50) DEFAULT NULL, + `default_value` varchar(200) DEFAULT NULL COMMENT 'Adopted when user does not set key', + `validate_type` varchar(50) DEFAULT NULL COMMENT 'Validate type, one of the following: None, NumInterval, FloatInterval, Include, Regex, OPF, Custom Rules', + `validate_range` varchar(50) DEFAULT NULL COMMENT 'Validate range', + `engine_conn_type` varchar(50) DEFAULT NULL COMMENT 'engine type,such as spark,hive etc', + `is_hidden` tinyint(1) DEFAULT NULL COMMENT 'Whether it is hidden from user. If set to 1(true), then user cannot modify, however, it could still be used in back-end', + `is_advanced` tinyint(1) DEFAULT NULL COMMENT 'Whether it is an advanced parameter. If set to 1(true), parameters would be displayed only when user choose to do so', + `level` tinyint(1) DEFAULT NULL COMMENT 'Basis for displaying sorting in the front-end. 
Higher the level is, higher the rank the parameter gets', + `treeName` varchar(20) DEFAULT NULL COMMENT 'Reserved field, representing the subdirectory of engineType', + `boundary_type` int(2) NOT NULL DEFAULT '0' COMMENT '0 none/ 1 with min /2 with max / 3 min and max both', + `en_description` varchar(200) DEFAULT NULL COMMENT 'english description', + `en_name` varchar(100) DEFAULT NULL COMMENT 'english name', + `en_treeName` varchar(100) DEFAULT NULL COMMENT 'english treeName', PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; @@ -132,7 +138,37 @@ data: PRIMARY KEY (`id`), UNIQUE INDEX(`label_id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; - + CREATE TABLE IF NOT EXISTS `linkis_ps_configuration_key_limit_for_user` ( + `id` BIGINT(20) NOT NULL AUTO_INCREMENT, + `user_name` VARCHAR(50) NOT NULL COMMENT 'username', + `combined_label_value` VARCHAR(128) NOT NULL COMMENT 'Combined label combined_userCreator_engineType such as hadoop-IDE,spark-2.4.3', + `key_id` BIGINT(20) NOT NULL COMMENT 'id of linkis_ps_configuration_config_key', + `config_value` VARCHAR(200) NULL DEFAULT NULL COMMENT 'configuration value', + `max_value` VARCHAR(50) NULL DEFAULT NULL COMMENT 'upper limit value', + `min_value` VARCHAR(50) NULL DEFAULT NULL COMMENT 'Lower limit value (reserved)', + `latest_update_template_uuid` VARCHAR(36) NOT NULL COMMENT 'uuid template id recorded by the third party', + `is_valid` VARCHAR(2) DEFAULT 'Y' COMMENT 'Is it valid? Reserved Y/N', + `create_by` VARCHAR(50) NOT NULL COMMENT 'Creator', + `create_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', + `update_by` VARCHAR(50) NULL DEFAULT NULL COMMENT 'Update by', + `update_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'update time', + PRIMARY KEY (`id`), + UNIQUE INDEX `uniq_com_label_kid` (`combined_label_value`, `key_id`) + )ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; + CREATE TABLE IF NOT EXISTS linkis_ps_configutation_lm_across_cluster_rule ( + id INT AUTO_INCREMENT COMMENT 'Rule ID, auto-increment primary key', + cluster_name char(32) NOT NULL COMMENT 'Cluster name, cannot be empty', + creator char(32) NOT NULL COMMENT 'Creator, cannot be empty', + username char(32) NOT NULL COMMENT 'User, cannot be empty', + create_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Creation time, cannot be empty', + create_by char(32) NOT NULL COMMENT 'Creator, cannot be empty', + update_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Modification time, cannot be empty', + update_by char(32) NOT NULL COMMENT 'Updater, cannot be empty', + rules varchar(256) NOT NULL COMMENT 'Rule content, cannot be empty', + is_valid VARCHAR(2) DEFAULT 'N' COMMENT 'Is it valid Y/N', + PRIMARY KEY (id), + UNIQUE KEY idx_creator_username (creator, username) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; -- -- New linkis job -- @@ -146,7 +182,7 @@ data: `labels` text DEFAULT NULL COMMENT 'job labels', `params` text DEFAULT NULL COMMENT 'job params', `progress` varchar(32) DEFAULT NULL COMMENT 'Job execution progress', - `status` varchar(50) DEFAULT NULL COMMENT 'Script execution status, must be one of the following: Inited, WaitForRetry, Scheduled, Running, Succeed, Failed, Cancelled, Timeout', + `status` varchar(50) DEFAULT NULL, `log_path` varchar(200) DEFAULT NULL COMMENT 'File path of the job log', `error_code` int DEFAULT NULL COMMENT 'Error code.
Generated when the execution of the script fails', `error_desc` varchar(1000) DEFAULT NULL COMMENT 'Execution description. Generated when the execution of script fails', @@ -180,6 +216,7 @@ data: CREATE TABLE IF NOT EXISTS `linkis_ps_common_lock` ( `id` int(11) NOT NULL AUTO_INCREMENT, `lock_object` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `locker` varchar(255) COLLATE utf8_bin NOT NULL, `time_out` longtext COLLATE utf8_bin, `update_time` datetime DEFAULT CURRENT_TIMESTAMP, `create_time` datetime DEFAULT CURRENT_TIMESTAMP, @@ -195,6 +232,8 @@ data: CREATE TABLE IF NOT EXISTS `linkis_ps_udf_manager` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `user_name` varchar(20) DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; @@ -207,6 +246,8 @@ data: `id` bigint(20) NOT NULL AUTO_INCREMENT, `udf_id` bigint(20) NOT NULL, `shared_group` varchar(50) NOT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; @@ -214,7 +255,9 @@ data: ( `id` bigint(20) PRIMARY KEY NOT NULL AUTO_INCREMENT, `udf_id` bigint(20) NOT NULL, - `user_name` varchar(50) NOT NULL + `user_name` varchar(50) NOT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- @@ -229,7 +272,8 @@ data: `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `category` varchar(50) DEFAULT NULL COMMENT 'Used to distinguish between udf and function', - PRIMARY KEY (`id`) + PRIMARY KEY (`id`), + UNIQUE KEY `uniq_parent_name_uname_category` (`parent`,`name`,`user_name`,`category`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; @@ -241,7 +285,10 @@ data: `id` bigint(20) NOT NULL AUTO_INCREMENT, `udf_id` bigint(20) NOT NULL, `user_name` varchar(50) NOT NULL, - PRIMARY KEY (`id`) + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `uniq_uid_uname` (`udf_id`, `user_name`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; CREATE TABLE IF NOT EXISTS `linkis_ps_udf_baseinfo` ( @@ -271,6 +318,7 @@ data: `use_format` varchar(255) DEFAULT NULL, `description` varchar(255) NOT NULL COMMENT 'version desc', `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, `md5` varchar(100) DEFAULT NULL, PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; @@ -503,7 +551,8 @@ data: `max_version` int(20) DEFAULT 10 COMMENT 'The default is 10, which means to keep the latest 10 versions', `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Updated time', `updator` varchar(50) DEFAULT NULL COMMENT 'updator', - `enable_flag` tinyint(1) NOT NULL DEFAULT '1' COMMENT 'Status, 1: normal, 0: frozen', + `enable_flag` tinyint(1) NOT NULL DEFAULT '1' , + unique key `uniq_rid_eflag`(`resource_id`, `enable_flag`), PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=9 DEFAULT CHARSET=utf8mb4; @@ -522,7 +571,7 @@ data: `end_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Stoped time', `client_ip` varchar(200) NOT NULL COMMENT 'Client ip', `updator` varchar(50) DEFAULT NULL COMMENT 'updator', - `enable_flag` tinyint(1) NOT NULL 
DEFAULT '1' COMMENT 'Status, 1: normal, 0: frozen', + `enable_flag` tinyint(1) NOT NULL DEFAULT '1', unique key `resource_id_version`(`resource_id`, `version`), PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; @@ -565,7 +614,7 @@ data: `operation` varchar(20) NOT NULL COMMENT 'Operation type. upload = 0, update = 1', `state` varchar(20) NOT NULL DEFAULT 'Schduled' COMMENT 'Current status of the task:Schduled, Running, Succeed, Failed,Cancelled', `submit_user` varchar(20) NOT NULL DEFAULT '' COMMENT 'Job submission user name', - `system` varchar(20) DEFAULT 'dss' COMMENT 'Subsystem name: wtss', + `system` varchar(20) DEFAULT 'dss', `instance` varchar(128) NOT NULL COMMENT 'Material library example', `client_ip` varchar(50) DEFAULT NULL COMMENT 'Request IP', `extra_params` text COMMENT 'Additional key information. Such as the resource IDs and versions that are deleted in batches, and all versions under the resource are deleted', @@ -1008,12 +1057,6 @@ data: INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.cores', '取值范围:只能取1,单位:个', 'spark驱动器核心个数', '1', 'NumInterval', '[1,1]', '0', '1', '1', 'spark资源设置','spark'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.memory', '取值范围:1-15,单位:G', 'spark驱动器内存大小','1g', 'Regex', '^([1-9]|1[0-5])(G|g)$', '0', '0', '1', 'spark资源设置', 'spark'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.max.free.time', '取值范围:3m,15m,30m,1h,2h', '引擎空闲退出时间','1h', 'OFT', '[\"1h\",\"2h\",\"30m\",\"15m\",\"3m\"]', '0', '0', '1', 'spark引擎设置', 'spark'); - INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.pd.addresses', NULL, NULL, 'pd0:2379', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); - INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.addr', NULL, NULL, 'tidb', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); - INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.password', NULL, NULL, NULL, 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); - INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.port', NULL, NULL, '4000', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); - INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.user', NULL, NULL, 'root', 'None', NULL, '0', '0', 
'1', 'tidb设置', 'spark'); - INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.python.version', '取值范围:python2,python3', 'python版本','python2', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'spark引擎设置', 'spark'); -- hive INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'hive引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'hive'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.java.driver.memory', '取值范围:1-10,单位:G', 'hive引擎初始化内存大小','1g', 'Regex', '^([1-9]|10)(G|g)$', '0', '0', '1', 'hive引擎设置', 'hive'); @@ -1231,8 +1274,9 @@ data: INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01101','ECM资源不足,请联系管理员扩容','ECM resources are insufficient',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01102','ECM 内存资源不足,请联系管理员扩容','ECM memory resources are insufficient',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01103','ECM CPU资源不足,请联系管理员扩容','ECM CPU resources are insufficient',0); - INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01004','ECM 实例资源不足,请联系管理员扩容','ECM Insufficient number of instances',0); - INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01005','机器内存不足,请联系管理员扩容','Cannot allocate memory',0); + INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01104','ECM 实例资源不足,请联系管理员扩容','ECM Insufficient number of instances',0); + INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01105','机器内存不足,请联系管理员扩容','Cannot allocate memory',0); + INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('12001','队列CPU资源不足,可以调整Spark执行器个数','Queue CPU resources are insufficient',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('12002','队列内存资源不足,可以调整Spark执行器个数','Insufficient queue memory',0); @@ -1256,8 +1300,11 @@ data: INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13005','Spark app应用退出,可能是复杂任务导致','Spark application has already stopped',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13006','Spark context退出,可能是复杂任务导致','Spark application sc has already stopped',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13007','Pyspark子进程意外退出,可能是复杂任务导致','Pyspark process has stopped',0); + INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13008','任务产生的序列化结果总大小超过了配置的spark.driver.maxResultSize限制。请检查您的任务,看看是否有可能减小任务产生的结果大小,或则可以考虑压缩或合并结果,以减少传输的数据量','is bigger than spark.driver.maxResultSize',0); + INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13009','您的任务因为引擎退出(退出可能是引擎进程OOM或者主动kill引擎)导致失败','ERROR EC exits unexpectedly and actively kills the task',0); + -- 21 cluster Authority 22 db Authority - INSERT INTO 
linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21001','会话创建失败,用户%s不能提交应用到队列:%s,请联系提供队列给您的人员','User (\\S+) cannot submit applications to queue (\\S+)',0); + INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21001','会话创建失败,用户%s不能提交应用到队列:%s,请联系提供队列给您的人员','User (\\S+) cannot submit applications to queue ([A-Za-z._0-9]+)',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21002','创建Python解释器失败,请联系管理员','initialize python executor failed',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21003','创建单机Python解释器失败,请联系管理员','PythonSession process cannot be initialized',0); diff --git a/linkis-dist/package/db/linkis_ddl.sql b/linkis-dist/package/db/linkis_ddl.sql index 739fa0ba71..f7f8c3547b 100644 --- a/linkis-dist/package/db/linkis_ddl.sql +++ b/linkis-dist/package/db/linkis_ddl.sql @@ -30,23 +30,24 @@ SET FOREIGN_KEY_CHECKS=0; DROP TABLE IF EXISTS `linkis_ps_configuration_config_key`; CREATE TABLE `linkis_ps_configuration_config_key`( - `id` bigint(20) NOT NULL AUTO_INCREMENT, - `key` varchar(50) DEFAULT NULL COMMENT 'Set key, e.g. spark.executor.instances', - `description` varchar(200) DEFAULT NULL, - `name` varchar(50) DEFAULT NULL, - `default_value` varchar(200) DEFAULT NULL COMMENT 'Adopted when user does not set key', - `validate_type` varchar(50) DEFAULT NULL COMMENT 'Validate type, one of the following: None, NumInterval, FloatInterval, Include, Regex, OPF, Custom Rules', - `validate_range` varchar(50) DEFAULT NULL COMMENT 'Validate range', - `engine_conn_type` varchar(50) DEFAULT NULL COMMENT 'engine type,such as spark,hive etc', - `is_hidden` tinyint(1) DEFAULT NULL COMMENT 'Whether it is hidden from user. If set to 1(true), then user cannot modify, however, it could still be used in back-end', - `is_advanced` tinyint(1) DEFAULT NULL COMMENT 'Whether it is an advanced parameter. If set to 1(true), parameters would be displayed only when user choose to do so', - `level` tinyint(1) DEFAULT NULL COMMENT 'Basis for displaying sorting in the front-end. Higher the level is, higher the rank the parameter gets', - `treeName` varchar(20) DEFAULT NULL COMMENT 'Reserved field, representing the subdirectory of engineType', - `en_description` varchar(200) DEFAULT NULL COMMENT 'english description', - `en_name` varchar(100) DEFAULT NULL COMMENT 'english name', - `en_treeName` varchar(100) DEFAULT NULL COMMENT 'english treeName', - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `key` varchar(50) DEFAULT NULL COMMENT 'Set key, e.g. spark.executor.instances', + `description` varchar(200) DEFAULT NULL, + `name` varchar(50) DEFAULT NULL, + `default_value` varchar(200) DEFAULT NULL COMMENT 'Adopted when user does not set key', + `validate_type` varchar(50) DEFAULT NULL COMMENT 'Validate type, one of the following: None, NumInterval, FloatInterval, Include, Regex, OPF, Custom Rules', + `validate_range` varchar(50) DEFAULT NULL COMMENT 'Validate range', + `engine_conn_type` varchar(50) DEFAULT NULL COMMENT 'engine type,such as spark,hive etc', + `is_hidden` tinyint(1) DEFAULT NULL COMMENT 'Whether it is hidden from user. If set to 1(true), then user cannot modify, however, it could still be used in back-end', + `is_advanced` tinyint(1) DEFAULT NULL COMMENT 'Whether it is an advanced parameter. 
If set to 1(true), parameters would be displayed only when user choose to do so', + `level` tinyint(1) DEFAULT NULL COMMENT 'Basis for displaying sorting in the front-end. Higher the level is, higher the rank the parameter gets', + `treeName` varchar(20) DEFAULT NULL COMMENT 'Reserved field, representing the subdirectory of engineType', + `boundary_type` int(2) NOT NULL DEFAULT '0' COMMENT '0 none/ 1 with min /2 with max / 3 min and max both', + `en_description` varchar(200) DEFAULT NULL COMMENT 'english description', + `en_name` varchar(100) DEFAULT NULL COMMENT 'english name', + `en_treeName` varchar(100) DEFAULT NULL COMMENT 'english treeName', + PRIMARY KEY (`id`) +)ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; DROP TABLE IF EXISTS `linkis_ps_configuration_key_engine_relation`; @@ -84,6 +85,43 @@ CREATE TABLE `linkis_ps_configuration_category` ( UNIQUE INDEX `uniq_label_id` (`label_id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; + + +DROP TABLE IF EXISTS `linkis_ps_configuration_key_limit_for_user`; +CREATE TABLE IF NOT EXISTS `linkis_ps_configuration_key_limit_for_user` ( + `id` BIGINT(20) NOT NULL AUTO_INCREMENT, + `user_name` VARCHAR(50) NOT NULL COMMENT 'username', + `combined_label_value` VARCHAR(128) NOT NULL COMMENT 'Combined label combined_userCreator_engineType such as hadoop-IDE,spark-2.4.3', + `key_id` BIGINT(20) NOT NULL COMMENT 'id of linkis_ps_configuration_config_key', + `config_value` VARCHAR(200) NULL DEFAULT NULL COMMENT 'configuration value', + `max_value` VARCHAR(50) NULL DEFAULT NULL COMMENT 'upper limit value', + `min_value` VARCHAR(50) NULL DEFAULT NULL COMMENT 'Lower limit value (reserved)', + `latest_update_template_uuid` VARCHAR(36) NOT NULL COMMENT 'uuid template id recorded by the third party', + `is_valid` VARCHAR(2) DEFAULT 'Y' COMMENT 'Is it valid?
Reserved Y/N', + `create_by` VARCHAR(50) NOT NULL COMMENT 'Creator', + `create_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', + `update_by` VARCHAR(50) NULL DEFAULT NULL COMMENT 'Update by', + `update_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'update time', + PRIMARY KEY (`id`), + UNIQUE INDEX `uniq_com_label_kid` (`combined_label_value`, `key_id`) +)ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; + +DROP TABLE IF EXISTS `linkis_ps_configutation_lm_across_cluster_rule`; +CREATE TABLE IF NOT EXISTS linkis_ps_configutation_lm_across_cluster_rule ( + id INT AUTO_INCREMENT COMMENT 'Rule ID, auto-increment primary key', + cluster_name char(32) NOT NULL COMMENT 'Cluster name, cannot be empty', + creator char(32) NOT NULL COMMENT 'Creator, cannot be empty', + username char(32) NOT NULL COMMENT 'User, cannot be empty', + create_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Creation time, cannot be empty', + create_by char(32) NOT NULL COMMENT 'Creator, cannot be empty', + update_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Modification time, cannot be empty', + update_by char(32) NOT NULL COMMENT 'Updater, cannot be empty', + rules varchar(256) NOT NULL COMMENT 'Rule content, cannot be empty', + is_valid VARCHAR(2) DEFAULT 'N' COMMENT 'Is it valid Y/N', + PRIMARY KEY (id), + UNIQUE KEY idx_creator_username (creator, username) +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; + -- -- New linkis job -- @@ -135,6 +173,7 @@ DROP TABLE IF EXISTS `linkis_ps_common_lock`; CREATE TABLE `linkis_ps_common_lock` ( `id` int(11) NOT NULL AUTO_INCREMENT, `lock_object` varchar(255) COLLATE utf8_bin DEFAULT NULL, + `locker` varchar(255) COLLATE utf8_bin NOT NULL, `time_out` longtext COLLATE utf8_bin, `update_time` datetime DEFAULT CURRENT_TIMESTAMP, `create_time` datetime DEFAULT CURRENT_TIMESTAMP, @@ -153,6 +192,8 @@ DROP TABLE IF EXISTS `linkis_ps_udf_manager`; CREATE TABLE `linkis_ps_udf_manager` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `user_name` varchar(20) DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; @@ -166,6 +207,8 @@ CREATE TABLE `linkis_ps_udf_shared_group` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `udf_id` bigint(20) NOT NULL, `shared_group` varchar(50) NOT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; @@ -174,7 +217,9 @@ CREATE TABLE `linkis_ps_udf_shared_info` ( `id` bigint(20) PRIMARY KEY NOT NULL AUTO_INCREMENT, `udf_id` bigint(20) NOT NULL, - `user_name` varchar(50) NOT NULL + `user_name` varchar(50) NOT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- @@ -190,7 +235,8 @@ CREATE TABLE `linkis_ps_udf_tree` ( `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, `category` varchar(50) DEFAULT NULL COMMENT 'Used to distinguish between udf and function', - PRIMARY KEY (`id`) + PRIMARY KEY (`id`), + UNIQUE KEY `uniq_parent_name_uname_category` (`parent`,`name`,`user_name`,`category`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; @@ -203,7 +249,10 @@ CREATE TABLE `linkis_ps_udf_user_load` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `udf_id` bigint(20) NOT NULL, 
`user_name` varchar(50) NOT NULL, - PRIMARY KEY (`id`) + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `uniq_uid_uname` (`udf_id`, `user_name`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; DROP TABLE IF EXISTS `linkis_ps_udf_baseinfo`; @@ -235,6 +284,7 @@ CREATE TABLE `linkis_ps_udf_version` ( `use_format` varchar(255) DEFAULT NULL, `description` varchar(255) NOT NULL COMMENT 'version desc', `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, `md5` varchar(100) DEFAULT NULL, PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; @@ -486,6 +536,7 @@ CREATE TABLE if not exists `linkis_ps_bml_resources` ( `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Updated time', `updator` varchar(50) DEFAULT NULL COMMENT 'updator', `enable_flag` tinyint(1) NOT NULL DEFAULT '1' COMMENT 'Status, 1: normal, 0: frozen', + unique key `uniq_rid_eflag`(`resource_id`, `enable_flag`), PRIMARY KEY (`id`) ) ENGINE=InnoDB AUTO_INCREMENT=9 DEFAULT CHARSET=utf8mb4; diff --git a/linkis-dist/package/db/linkis_ddl_pg.sql b/linkis-dist/package/db/linkis_ddl_pg.sql index c205d76590..8d66c0dd8e 100644 --- a/linkis-dist/package/db/linkis_ddl_pg.sql +++ b/linkis-dist/package/db/linkis_ddl_pg.sql @@ -40,10 +40,11 @@ CREATE TABLE linkis_ps_configuration_config_key ( is_hidden bool NULL, is_advanced bool NULL, "level" int2 NULL, + boundary_type int2 null, "treeName" varchar(20) NULL, en_description varchar(200) NULL, en_name varchar(100) NULL, "en_treeName" varchar(100) NULL, CONSTRAINT linkis_configuration_config_key_pkey PRIMARY KEY (id) ); COMMENT ON COLUMN "linkis_ps_configuration_config_key"."key" IS 'Set key, e.g.
spark.executor.instances'; @@ -58,7 +60,7 @@ COMMENT ON COLUMN "linkis_ps_configuration_config_key"."treeName" IS 'Reserved f COMMENT ON COLUMN "linkis_ps_configuration_config_key"."treeName" IS 'english description'; COMMENT ON COLUMN "linkis_ps_configuration_config_key"."treeName" IS 'english name'; COMMENT ON COLUMN "linkis_ps_configuration_config_key"."treeName" IS 'english treeName'; - +CREATE UNIQUE INDEX uniq_key_ectype ON linkis_ps_configuration_config_key USING btree ("key","engine_conn_type"); DROP TABLE IF EXISTS "linkis_ps_configuration_key_engine_relation"; CREATE TABLE linkis_ps_configuration_key_engine_relation ( @@ -98,6 +100,53 @@ CREATE TABLE linkis_ps_configuration_category ( ); CREATE UNIQUE INDEX uniq_label_id_cc ON linkis_ps_configuration_category USING btree (label_id); +DROP TABLE IF EXISTS linkis_ps_configuration_template_config_key; +CREATE TABLE linkis_ps_configuration_template_config_key ( + id BIGINT PRIMARY KEY NOT NULL, + template_name VARCHAR(200) NOT NULL, + template_uuid VARCHAR(36) NOT NULL, + key_id BIGINT NOT NULL, + config_value VARCHAR(200) NULL DEFAULT NULL, + max_value VARCHAR(50) NULL DEFAULT NULL, + min_value VARCHAR(50) NULL DEFAULT NULL, + validate_range VARCHAR(50) NULL DEFAULT NULL, + is_valid VARCHAR(2) DEFAULT 'Y', + create_by VARCHAR(50) NOT NULL, + create_time TIMESTAMP WITHOUT TIME ZONE DEFAULT NOW(), + update_by VARCHAR(50) NULL DEFAULT NULL, + update_time TIMESTAMP WITHOUT TIME ZONE DEFAULT NOW() +); + +DROP TABLE IF EXISTS linkis_ps_configuration_key_limit_for_user; +CREATE TABLE IF NOT EXISTS linkis_ps_configuration_key_limit_for_user ( + id bigint NOT NULL GENERATED ALWAYS AS IDENTITY (START WITH 1), + user_name varchar(50) NOT NULL, + combined_label_value varchar(128) NOT NULL, + key_id bigint NOT NULL, + config_value varchar(200) NULL DEFAULT NULL, + max_value varchar(50) NULL DEFAULT NULL, + min_value varchar(50) NULL DEFAULT NULL, + latest_update_template_uuid varchar(36) NOT NULL, + is_valid varchar(2) DEFAULT 'Y', + create_by varchar(50) NOT NULL, + create_time timestamp without time zone DEFAULT now(), + update_by varchar(50) NULL DEFAULT NULL, + update_time timestamp without time zone DEFAULT now() + ); + +DROP TABLE IF EXISTS linkis_ps_configutation_lm_across_cluster_rule; +CREATE TABLE IF NOT EXISTS linkis_ps_configutation_lm_across_cluster_rule ( + rule_id INT GENERATED ALWAYS AS IDENTITY PRIMARY KEY, + cluster_name varchar(32) NOT NULL, + creator varchar(32) NOT NULL, + username varchar(32) NOT NULL, + create_time TIMESTAMP NOT NULL DEFAULT NOW(), + create_by varchar(32) NOT NULL, + update_time TIMESTAMP NOT NULL DEFAULT NOW(), + update_by varchar(32), + rules TEXT NOT NULL, + is_valid varchar(1) DEFAULT 'N' +); DROP TABLE IF EXISTS "linkis_ps_job_history_group_history"; CREATE TABLE linkis_ps_job_history_group_history ( @@ -176,6 +225,7 @@ DROP TABLE IF EXISTS "linkis_ps_common_lock"; CREATE TABLE linkis_ps_common_lock ( id bigserial NOT NULL, lock_object varchar(255) NULL, + locker varchar(255) NOT NULL, time_out text NULL, update_time timestamp(6) NULL DEFAULT CURRENT_TIMESTAMP, create_time timestamp(6) NULL DEFAULT CURRENT_TIMESTAMP, @@ -188,6 +238,8 @@ DROP TABLE IF EXISTS "linkis_ps_udf_manager"; CREATE TABLE linkis_ps_udf_manager ( id bigserial NOT NULL, user_name varchar(20) NULL, + update_time timestamp(6) NULL DEFAULT CURRENT_TIMESTAMP, + create_time timestamp(6) NULL DEFAULT CURRENT_TIMESTAMP, CONSTRAINT linkis_udf_manager_pkey PRIMARY KEY (id) ); @@ -197,6 +249,8 @@ CREATE TABLE linkis_ps_udf_shared_group ( id bigserial NOT
NULL, udf_id int8 NOT NULL, shared_group varchar(50) NOT NULL, + update_time timestamp(6) NULL DEFAULT CURRENT_TIMESTAMP, + create_time timestamp(6) NULL DEFAULT CURRENT_TIMESTAMP, CONSTRAINT linkis_udf_shared_group_pkey PRIMARY KEY (id) ); @@ -206,6 +260,8 @@ CREATE TABLE linkis_ps_udf_shared_info ( id bigserial NOT NULL, udf_id int8 NOT NULL, user_name varchar(50) NOT NULL, + update_time timestamp(6) NULL DEFAULT CURRENT_TIMESTAMP, + create_time timestamp(6) NULL DEFAULT CURRENT_TIMESTAMP, CONSTRAINT linkis_ps_udf_shared_info_pkey PRIMARY KEY (id) ); @@ -224,16 +280,18 @@ CREATE TABLE linkis_ps_udf_tree ( ); COMMENT ON COLUMN "linkis_ps_udf_tree"."name" IS 'Category name of the function. It would be displayed in the front-end'; COMMENT ON COLUMN "linkis_ps_udf_tree"."category" IS 'Used to distinguish between udf and function'; - +CREATE UNIQUE INDEX uniq_parent_name_uname_category ON linkis_ps_udf_tree USING btree (parent,name,user_name,category); DROP TABLE IF EXISTS "linkis_ps_udf_user_load"; CREATE TABLE linkis_ps_udf_user_load ( id bigserial NOT NULL, udf_id int4 NOT NULL, user_name varchar(50) NOT NULL, + update_time timestamp(6) NULL DEFAULT CURRENT_TIMESTAMP, + create_time timestamp(6) NULL DEFAULT CURRENT_TIMESTAMP, CONSTRAINT linkis_ps_udf_user_load_pkey PRIMARY KEY (id) ); - +CREATE UNIQUE INDEX uniq_uid_uname ON linkis_ps_udf_user_load USING btree (udf_id, user_name); DROP TABLE IF EXISTS "linkis_ps_udf_baseinfo"; CREATE TABLE linkis_ps_udf_baseinfo ( @@ -264,6 +322,7 @@ CREATE TABLE linkis_ps_udf_version ( use_format varchar(255) NULL, description varchar(255) NOT NULL, create_time timestamptz(6) NOT NULL DEFAULT CURRENT_TIMESTAMP, + update_time timestamp(6) NULL DEFAULT CURRENT_TIMESTAMP, md5 varchar(100) NULL, CONSTRAINT linkis_ps_udf_version_pkey PRIMARY KEY (id) ); @@ -509,7 +568,7 @@ COMMENT ON COLUMN "linkis_ps_bml_resources"."max_version" IS '默认为10,指 COMMENT ON COLUMN "linkis_ps_bml_resources"."update_time" IS '更新时间'; COMMENT ON COLUMN "linkis_ps_bml_resources"."updator" IS '更新者'; COMMENT ON COLUMN "linkis_ps_bml_resources"."enable_flag" IS '状态,1:正常,0:冻结'; - +CREATE UNIQUE INDEX uniq_rid_eflag ON linkis_ps_bml_resources USING btree (resource_id, enable_flag); DROP TABLE IF EXISTS "linkis_ps_bml_resources_version"; CREATE TABLE linkis_ps_bml_resources_version ( diff --git a/linkis-dist/package/db/linkis_dml.sql b/linkis-dist/package/db/linkis_dml.sql index 0a15f31ca1..8a1dcffa3a 100644 --- a/linkis-dist/package/db/linkis_dml.sql +++ b/linkis-dist/package/db/linkis_dml.sql @@ -83,19 +83,17 @@ INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.cores', '取值范围:只能取1,单位:个', 'spark驱动器核心个数', '1', 'NumInterval', '[1,1]', '0', '1', '1', 'spark资源设置','spark'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.memory', '取值范围:1-15,单位:G', 'spark驱动器内存大小','1g', 'Regex', '^([1-9]|1[0-5])(G|g)$', '0', '0', '1', 'spark资源设置', 'spark'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES 
('wds.linkis.engineconn.max.free.time', '取值范围:3m,15m,30m,1h,2h', '引擎空闲退出时间','1h', 'OFT', '[\"1h\",\"2h\",\"30m\",\"15m\",\"3m\"]', '0', '0', '1', 'spark引擎设置', 'spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.pd.addresses', NULL, NULL, 'pd0:2379', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.addr', NULL, NULL, 'tidb', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.password', NULL, NULL, NULL, 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.port', NULL, NULL, '4000', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.user', NULL, NULL, 'root', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.python.version', '取值范围:python2,python3', 'python版本','python3', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'spark引擎设置', 'spark'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.python.version', '取值范围:python2,python3', 'python版本','python2', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'spark引擎设置', 'spark'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`, `boundary_type`, `en_treeName`, `en_description`, `en_name`) VALUES ('spark.conf', '多个参数使用分号[;]分隔 例如spark.shuffle.spill=true;', 'spark自定义配置参数',null, 'None', NULL, 'spark',0, 1, 1,'spark资源设置', 0, 'Spark Resource Settings','Multiple parameters are separated by semicolons [;] For example, spark.sql.shuffle.partitions=10;', 'Spark Custom Configuration Parameters'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`, `boundary_type`, `en_treeName`, `en_description`, `en_name`) VALUES ('spark.locality.wait', '范围:0-3,单位:秒', '任务调度本地等待时间', '3s', 'OFT', '[\"0s\",\"1s\",\"2s\",\"3s\"]', 'spark', 0, 1, 1, 'spark资源设置', 0, 'Spark Resource Settings', 'Range: 0-3, Unit: second', 'Task Scheduling Local Waiting Time'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, 
`name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`, `boundary_type`, `en_treeName`, `en_description`, `en_name`) VALUES ('spark.memory.fraction', '范围:0.4,0.5,0.6,单位:百分比', '执行内存和存储内存的百分比', '0.6', 'OFT', '[\"0.4\",\"0.5\",\"0.6\"]', 'spark', 0, 1, 1, 'spark资源设置', 0, 'Spark Resource Settings', 'Range: 0.4, 0.5, 0.6, in percentage', 'Percentage Of Execution Memory And Storage Memory'); + -- hive INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'hive引擎最大并发数', '20', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'hive'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.java.driver.memory', '取值范围:1-10,单位:G', 'hive引擎初始化内存大小','1g', 'Regex', '^([1-9]|10)(G|g)$', '0', '0', '1', 'hive引擎设置', 'hive'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('hive.client.java.opts', 'hive客户端进程参数', 'hive引擎启动时jvm参数','', 'None', NULL, '1', '1', '1', 'hive引擎设置', 'hive'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('mapred.reduce.tasks', '范围:-1-10000,单位:个', 'reduce数', '-1', 'NumInterval', '[-1,10000]', '0', '1', '1', 'hive资源设置', 'hive'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.max.free.time', '取值范围:3m,15m,30m,1h,2h', '引擎空闲退出时间','1h', 'OFT', '[\"1h\",\"2h\",\"30m\",\"15m\",\"3m\"]', '0', '0', '1', 'hive引擎设置', 'hive'); - -- python INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', 'python驱动器内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源', 'python'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', 'python驱动器核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源', 'python'); @@ -380,8 +378,8 @@ INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01101','ECM资源不足,请联系管理员扩容','ECM resources are insufficient',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01102','ECM 内存资源不足,请联系管理员扩容','ECM memory resources are insufficient',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01103','ECM CPU资源不足,请联系管理员扩容','ECM CPU resources are insufficient',0); -INSERT INTO linkis_ps_error_code 
(error_code,error_desc,error_regex,error_type) VALUES ('01004','ECM 实例资源不足,请联系管理员扩容','ECM Insufficient number of instances',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01005','机器内存不足,请联系管理员扩容','Cannot allocate memory',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01104','ECM 实例资源不足,请联系管理员扩容','ECM Insufficient number of instances',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01105','机器内存不足,请联系管理员扩容','Cannot allocate memory',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('12001','队列CPU资源不足,可以调整Spark执行器个数','Queue CPU resources are insufficient',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('12002','队列内存资源不足,可以调整Spark执行器个数','Insufficient queue memory',0); @@ -406,8 +404,11 @@ INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13005','Spark app应用退出,可能是复杂任务导致','Spark application has already stopped',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13006','Spark context退出,可能是复杂任务导致','Spark application sc has already stopped',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13007','Pyspark子进程意外退出,可能是复杂任务导致','Pyspark process has stopped',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13008','任务产生的序列化结果总大小超过了配置的spark.driver.maxResultSize限制。请检查您的任务,看看是否有可能减小任务产生的结果大小,或者可以考虑压缩或合并结果,以减少传输的数据量','is bigger than spark.driver.maxResultSize',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13009','您的任务因为引擎退出(退出可能是引擎进程OOM或者主动kill引擎)导致失败','ERROR EC exits unexpectedly and actively kills the task',0); + -- 21 cluster Authority 22 db Authority -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21001','会话创建失败,用户%s不能提交应用到队列:%s,请联系提供队列给您的人员','User (\\S+) cannot submit applications to queue (\\S+)',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21001','会话创建失败,用户%s不能提交应用到队列:%s,请联系提供队列给您的人员','User (\\S+) cannot submit applications to queue ([A-Za-z._0-9]+)',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21002','创建Python解释器失败,请联系管理员','initialize python executor failed',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21003','创建单机Python解释器失败,请联系管理员','PythonSession process cannot be initialized',0); @@ -508,7 +509,9 @@ INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43013','索引使用错误','IndexError',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43014','sql语法有问题','raise ParseException',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43015','当前节点需要的CS表解析失败,请检查当前CSID对应的CS表是否存在','Cannot parse cs table for node',0); - +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43016','模块 %s 没有属性 %s ,请确认代码引用是否正常','AttributeError: \'(\\S+)\' object has no attribute \'(\\S+)\'',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43017','存在参数无效或拼写错误,请确认 %s 
参数正确性','KeyError: (\\(.+\\))',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43018','文件未找到,请确认该路径( %s )是否存在','FileNotFoundError.*No such file or directory\\:\\s\'(\\S+)\'',0); -- 46 importExport INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('46001','找不到导入文件地址:%s','java.io.FileNotFoundException: (\\S+) \\(No such file or directory\\)',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('46002','导出为excel时临时文件目录权限异常','java.io.IOException: Permission denied(.+)at org.apache.poi.xssf.streaming.SXSSFWorkbook.createAndRegisterSXSSFSheet',0); diff --git a/linkis-dist/package/db/module/linkis_configuration.sql b/linkis-dist/package/db/module/linkis_configuration.sql index fefa6f9f99..a171c71819 100644 --- a/linkis-dist/package/db/module/linkis_configuration.sql +++ b/linkis-dist/package/db/module/linkis_configuration.sql @@ -14,23 +14,29 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - + DROP TABLE IF EXISTS `linkis_ps_configuration_config_key`; -CREATE TABLE `linkis_ps_configuration_config_key`( - `id` bigint(20) NOT NULL AUTO_INCREMENT, - `key` varchar(50) DEFAULT NULL COMMENT 'Set key, e.g. spark.executor.instances', - `description` varchar(200) DEFAULT NULL, - `name` varchar(50) DEFAULT NULL, - `default_value` varchar(200) DEFAULT NULL COMMENT 'Adopted when user does not set key', - `validate_type` varchar(50) DEFAULT NULL COMMENT 'Validate type, one of the following: None, NumInterval, FloatInterval, Include, Regex, OPF, Custom Rules', - `validate_range` varchar(50) DEFAULT NULL COMMENT 'Validate range', - `engine_conn_type` varchar(50) DEFAULT NULL COMMENT 'engine type,such as spark,hive etc', - `is_hidden` tinyint(1) DEFAULT NULL COMMENT 'Whether it is hidden from user. If set to 1(true), then user cannot modify, however, it could still be used in back-end', - `is_advanced` tinyint(1) DEFAULT NULL COMMENT 'Whether it is an advanced parameter. If set to 1(true), parameters would be displayed only when user choose to do so', - `level` tinyint(1) DEFAULT NULL COMMENT 'Basis for displaying sorting in the front-end. Higher the level is, higher the rank the parameter gets', - `treeName` varchar(20) DEFAULT NULL COMMENT 'Reserved field, representing the subdirectory of engineType', - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; +CREATE TABLE `linkis_ps_configuration_config_key` +( + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `key` varchar(50) DEFAULT NULL COMMENT 'Set key, e.g. spark.executor.instances', + `description` varchar(200) DEFAULT NULL, + `name` varchar(50) DEFAULT NULL, + `default_value` varchar(200) DEFAULT NULL COMMENT 'Adopted when user does not set key', + `validate_type` varchar(50) DEFAULT NULL COMMENT 'Validate type, one of the following: None, NumInterval, FloatInterval, Include, Regex, OPF, Custom Rules', + `validate_range` varchar(50) DEFAULT NULL COMMENT 'Validate range', + `engine_conn_type` varchar(50) DEFAULT NULL COMMENT 'engine type,such as spark,hive etc', + `is_hidden` tinyint(1) DEFAULT NULL COMMENT 'Whether it is hidden from user. If set to 1(true), then user cannot modify, however, it could still be used in back-end', + `is_advanced` tinyint(1) DEFAULT NULL COMMENT 'Whether it is an advanced parameter. 
If set to 1(true), parameters would be displayed only when user chooses to do so', + `level` tinyint(1) DEFAULT NULL COMMENT 'Basis for displaying sorting in the front-end. Higher the level is, higher the rank the parameter gets', + `treeName` varchar(20) DEFAULT NULL COMMENT 'Reserved field, representing the subdirectory of engineType', + `boundary_type` int(2) NOT NULL DEFAULT '0' COMMENT '0 none/ 1 with min /2 with max / 3 min and max both', + `en_description` varchar(200) DEFAULT NULL COMMENT 'english description', + `en_name` varchar(100) DEFAULT NULL COMMENT 'english name', + `en_treeName` varchar(100) DEFAULT NULL COMMENT 'english treeName', + PRIMARY KEY (`id`) +); + DROP TABLE IF EXISTS `linkis_ps_configuration_key_engine_relation`; diff --git a/linkis-dist/package/db/module/linkis_configuration_dml.sql b/linkis-dist/package/db/module/linkis_configuration_dml.sql index 0d989eba38..7f34f8a75f 100644 --- a/linkis-dist/package/db/module/linkis_configuration_dml.sql +++ b/linkis-dist/package/db/module/linkis_configuration_dml.sql @@ -60,11 +60,6 @@ INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.memory', '取值范围:3-15,单位:G', 'spark执行器内存大小', '3g', 'Regex', '^([3-9]|1[0-5])(G|g)$', '0', '0', '3', 'spark资源设置', 'spark'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.cores', '取值范围:只能取1,单位:个', 'spark驱动器核心个数', '1', 'NumInterval', '[1,1]', '1', '1', '1', 'spark资源设置','spark'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.memory', '取值范围:1-15,单位:G', 'spark驱动器内存大小','2g', 'Regex', '^([1-9]|1[0-5])(G|g)$', '0', '0', '1', 'spark资源设置', 'spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.pd.addresses', NULL, NULL, 'pd0:2379', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.addr', NULL, NULL, 'tidb', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.password', NULL, NULL, NULL, 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.port', NULL, NULL, '4000', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, 
`is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.user', NULL, NULL, 'root', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.python.version', '取值范围:python2,python3', 'python版本','python2', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'spark引擎设置', 'spark'); -- hive INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'hive引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'hive'); diff --git a/linkis-dist/package/db/module/linkis_udf.sql b/linkis-dist/package/db/module/linkis_udf.sql index 999793b1cc..3e7b2c4f13 100644 --- a/linkis-dist/package/db/module/linkis_udf.sql +++ b/linkis-dist/package/db/module/linkis_udf.sql @@ -25,6 +25,8 @@ DROP TABLE IF EXISTS `linkis_ps_udf_manager`; CREATE TABLE `linkis_ps_udf_manager` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `user_name` varchar(20) DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; @@ -73,9 +75,12 @@ CREATE TABLE `linkis_ps_udf_tree` ( DROP TABLE IF EXISTS `linkis_ps_udf_user_load`; CREATE TABLE `linkis_ps_udf_user_load` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, - `udf_id` int(11) NOT NULL, + `udf_id` bigint(20) NOT NULL, `user_name` varchar(50) NOT NULL, - PRIMARY KEY (`id`) + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`), + UNIQUE KEY `uniq_uid_uname` (`udf_id`, `user_name`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; DROP TABLE IF EXISTS `linkis_ps_udf_baseinfo`; diff --git a/linkis-dist/package/db/udf/udf_sys.sql b/linkis-dist/package/db/udf/udf_sys.sql new file mode 100644 index 0000000000..903834596c --- /dev/null +++ b/linkis-dist/package/db/udf/udf_sys.sql @@ -0,0 +1,813 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +-- 字符串函数 +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","字符串函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="字符串函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","substring","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "substring"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","",""," Column substring(Column str, int pos, int len)","Returns the substring or slice of the byte array of A starting from start position till the end of string A or with optional length len.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","concat","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "concat"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string concat(STRING|BINARY a, STRING|BINARY b...)","Returns the string or bytes resulting from concatenating the strings or bytes passed in as parameters in order. For example, concat('foo', 'bar') results in 'foobar'. Note that this function can take any number of input strings.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","concat_ws","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "concat_ws"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string concat_ws(string SEP, array<string>)","Like concat(), but with custom separator SEP.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","decode","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "decode"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string decode(binary bin, string charset)","Decodes the first argument into a String using the provided character set (one of 'US-ASCII', 'ISO-8859-1', 'UTF-8', 'UTF-16BE', 'UTF-16LE', 'UTF-16'). If either argument is null, the result will also be null. 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","elt","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "elt"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string elt(N int,str1 string,str2 string,str3 string,...)","Return string at index number. For example elt(2,'hello','world') returns 'world'.?Returns NULL if N is less than 1 or greater than the number of arguments.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","format_number","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "format_number"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string format_number(number x, int d)","Formats the number X to a format like '#,###,###.##', rounded to D decimal places, and returns the result as a string. If D is 0, the result has no decimal point or fractional part.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","get_json_object","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "get_json_object"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string get_json_object(string json_string, string path)","Extracts json object from a json string based on json path specified, and returns json string of the extracted json object. It will return null if the input json string is invalid.?NOTE: The json path can only have the characters [0-9a-z_], i.e., no upper",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","lower","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "lower"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string lower(string A) lcase(string A)","Returns the string resulting from converting all characters of B to lower case. 
For example, lower('fOoBaR') results in 'foobar'.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","lcase","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "lcase"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string lcase(string A)","Returns the string resulting from converting all characters of B to lower case. For example, lower('fOoBaR') results in 'foobar'.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","lpad","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "lpad"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string lpad(string str, int len, string pad)","Returns str, left-padded with pad to a length of len. If str is longer than len, the return value is shortened to len characters. In case of empty pad string, the return value is null.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ltrim","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ltrim"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string ltrim(string A)","Returns the string resulting from trimming spaces from the beginning(left hand side) of A. For example, ltrim(' foobar ') results in 'foobar '.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","parse_url","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "parse_url"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string parse_url(string urlString, string partToExtract [, string keyToExtract])","Returns the specified part from the URL. Valid values for partToExtract include HOST, PATH, QUERY, REF, PROTOCOL, AUTHORITY, FILE, and USERINFO. For example, parse_url('http://facebook.com/path1/p.php?k1=v1&k2=v2#Ref1', 'HOST') returns 'facebook.com'. 
Als",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","printf","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "printf"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string printf(String format, Obj... args)","Returns the input formatted according do printf-style format strings .",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regexp_extract","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regexp_extract"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string regexp_extract(string subject, string pattern, int index)","Returns the string extracted using the pattern. For example, regexp_extract('foothebar', 'foo(.*?)(bar)', 2) returns 'bar.' Note that some care is necessary in using predefined character classes: using 's' as the second argument will match the letter s; '",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regexp_replace","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regexp_replace"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string regexp_replace(string INITIAL_STRING, string PATTERN, string REPLACEMENT)","Returns the string resulting from replacing all substrings in INITIAL_STRING that match the java regular expression syntax defined in PATTERN with instances of REPLACEMENT. For example, regexp_replace(foobar, oo|ar, ) returns 'fb.' 
Note that some care is ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","repeat","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "repeat"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string repeat(string str, int n)","Repeats str n times.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","replace","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "replace"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string replace(string A, string OLD, string NEW)","Returns the string A with all non-overlapping?occurrences of OLD replaced with NEW . Example: select replace(ababab, abab, Z); returns Zab.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","reverse","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "reverse"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string reverse(string A)","Returns the reversed string.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","rpad","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "rpad"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string rpad(string str, int len, string pad)","Returns str, right-padded with pad to a length of len. If str is longer than len, the return value is shortened to len characters. In case of empty pad string, the return value is null.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","rtrim","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "rtrim"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string rtrim(string A)","Returns the string resulting from trimming spaces from the end(right hand side) of A. 
For example, rtrim(' foobar ') results in ' foobar'.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","space","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "space"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string space(int n)","Returns a string of n spaces.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","substr","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "substr"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string substr(STRING|BINARY A, INT start [, INT len])","Returns the substring or slice of the byte array of A starting from start position till the end of string A or with optional length len. For example, substr('foobar', 4) results in 'bar'",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","substring","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "substring"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string substring(STRING|BINARY a, INT start [, INT len])","Returns the substring or slice of the byte array of A starting from start position till the end of string A or with optional length len. For example, substr('foobar', 4) results in 'bar'",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","substring_index","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "substring_index"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string substring_index(string A, string delim, int count)","Returns the substring from string A before count occurrences of the delimiter delim. If count is positive, everything to the left of the final delimiter (counting from the left) is returned. 
If count is negative, everything to the right of the final delim",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","translate","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "translate"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string translate(string|char|varchar input, string|char|varchar from, string|char|varchar to)","Translates the input string by replacing the characters present in the from string with the corresponding characters in the to string. This is similar to the translate function in PostgreSQL. If any of the parameters to this UDF are NULL, the result is NUL",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","trim","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "trim"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string trim(string A)","Returns the string resulting from trimming spaces from both ends of A. For example, trim(' foobar ') results in 'foobar'",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","upper","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "upper"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string upper(string A)","Returns the string resulting from converting all characters of A to upper case. For example, upper('fOoBaR') results in 'FOOBAR'.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ucase","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ucase"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string ucase(string A)","Returns the string resulting from converting all characters of A to upper case. 
For example, upper('fOoBaR') results in 'FOOBAR'.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","initcap","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "initcap"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string initcap(string A)","Returns string, with the first letter of each word in uppercase, all other letters in lowercase. Words are delimited by whitespace.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","soundex","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "soundex"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string soundex(string A)","Returns soundex code of the string. For example, soundex('Miller') results in M460.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","str_to_map","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "str_to_map"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","map<string,string> str_to_map(text[, delimiter1, delimiter2])","Splits text into key-value pairs using two delimiters. Delimiter1 separates text into K-V pairs, and Delimiter2 splits each K-V pair. Default delimiters are ',' for delimiter1 and ':' for delimiter2.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ascii","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ascii"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int ascii(string str)","Returns the numeric value of the first character of str.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","character_length","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "character_length"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int character_length(string str)","Returns the number of UTF-8 characters contained in str. 
The function char_length is shorthand for this function.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","field","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "field"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int field(val T,val1 T,val2 T,val3 T,...)","Returns the index of val in the val1,val2,val3,... list or 0 if not found. For example field('world','say','hello','world') returns 3. All primitive types are supported, arguments are compared using str.equals(x). If val is NULL, the return value is 0.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","find_in_set","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "find_in_set"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int find_in_set(string str, string strList)","Returns the first occurrence of str in strList where strList is a comma-delimited string. Returns null if either argument is null. Returns 0 if the first argument contains any commas. For example, find_in_set('ab', 'abc,b,ab,c,def') returns 3.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","instr","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "instr"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int instr(string str, string substr)","Returns the position of the first occurrence of substr in str. Returns null if either of the arguments are null and returns 0 if substr could not be found in str. Be aware that this is not zero based. 
The first character in str has index 1.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","length","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "length"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int length(string A)","Returns the length of the string.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","locate","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "locate"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int locate(string substr, string str[, int pos])","Returns the position of the first occurrence of substr in str after position pos.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","octet_length","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "octet_length"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int octet_length(string str)","Returns the number of octets required to hold the string str in UTF-8 encoding. Note that octet_length(str) can be larger than character_length(str).",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","levenshtein","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "levenshtein"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int levenshtein(string A, string B)","Returns the Levenshtein distance between two strings. 
For example, levenshtein('kitten', 'sitting') results in 3.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","in_file","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "in_file"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","boolean in_file(string str, string filename)","Returns true if the string str appears as an entire line in filename.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","encode","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "encode"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","binary encode(string src, string charset)","Encodes the first argument into a BINARY using the provided character set (one of 'US-ASCII', 'ISO-8859-1', 'UTF-8', 'UTF-16BE', 'UTF-16LE', 'UTF-16'). If either argument is null, the result will also be null.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","expr","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "expr"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column expr(String expr)","Parses the expression string into the column that it represents, similar to DataFrame.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","unbase64","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "unbase64"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","binary unbase64(string str)","Converts the argument from a base 64 string to BINARY. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","context_ngrams","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "context_ngrams"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array<struct<string,double>> context_ngrams(array<array<string>>, array<string>, int K, int pf)","Returns the top-k contextual N-grams from a set of tokenized sentences, given a string of context. 
See StatisticsAndDataMining for more information.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","format_string","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "format_string"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column format_string(String format, scala.collection.Seq<Column> arguments)","Formats the arguments in printf-style and returns the result as a string column.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ngrams","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ngrams"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array<struct<string,double>> ngrams(array<array<string>>, int N, int K, int pf)","Returns the top-k N-grams from a set of tokenized sentences, such as those returned by the sentences() UDAF. See StatisticsAndDataMining for more information.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sentences","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sentences"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array<array<string>> sentences(string str, string lang, string locale)","Tokenizes a string of natural language text into words and sentences, where each sentence is broken at the appropriate sentence boundary and returned as an array of words. The 'lang' and 'locale' are optional arguments. For example, sentences('Hello there
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","split","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "split"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array<string> split(string str, string pat)","Splits str around pat (pat is a regular expression).",now(),"",now()); +-- 数值函数 +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","数值函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="数值函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","abs","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "abs"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","","Column abs(Column e)","Computes the absolute value",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","randn","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "randn"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column randn(long seed)","Generate a column with independent and identically distributed (i.i.d.) 
samples from the standard normal distribution.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","rint","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "rint"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column rint(Column e)","Returns the double value that is closest in value to the argument and is equal to a mathematical integer.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","signum","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "signum"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column signum(Column e)","Computes the signum of the given value.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sinh","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sinh"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column sinh(String columnName)","Computes the hyperbolic sine of the given column.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","tanh","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "tanh"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column tanh(Column e)","Computes the hyperbolic tangent of the given value.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","toDegrees","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "toDegrees"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column toDegrees(Column e)","Use degrees. 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","toRadians","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "toRadians"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column toRadians(Column e)","Use radians.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","atan2","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "atan2"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column atan2(Column l, Column r)","Returns the angle theta from the conversion of rectangular coordinates (x, y) to polar coordinates (r, theta).",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","cosh","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "cosh"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column cosh(Column e)","Computes the hyperbolic cosine of the given value.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","expm1","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "expm1"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column expm1(Column e)","Computes the exponential of the given value minus one.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","round","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "round"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE round(DOUBLE a [, INT d])","Returns the rounded BIGINT value of a or a rounded to d decimal places.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","hypot","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "hypot"; +INSERT INTO linkis_ps_udf_version 
(udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," Column hypot(Column l, Column r)","Computes sqrt(a^2^ + b^2^) without intermediate overflow or underflow.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","bround","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "bround"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE bround(DOUBLE a [, INT decimals])","Returns the rounded BIGINT value of a using HALF_EVEN rounding mode with optional decimal places d.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","floor","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "floor"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","BIGINT floor(DOUBLE a)","Returns the maximum?BIGINT?value that is equal to or less than?a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ceil","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ceil"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","BIGINT ceil(DOUBLE a)","Returns the minimum BIGINT value that is equal to or greater than?a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ceiling","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ceiling"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","BIGINT ceiling(DOUBLE a)","Returns the minimum BIGINT value that is equal to or greater than?a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","rand","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "rand"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE rand([INT seed])","Returns a random number (that changes from row to row) that is distributed uniformly from 0 to 1. 
Specifying the seed will make sure the generated random number sequence is deterministic.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","log1p","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "log1p"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column log1p(Column e)","Computes the natural logarithm of the given value plus one.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","exp","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "exp"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE exp(TL a)","Returns?ea?where?e?is the base of the natural logarithm. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ln","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ln"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE ln(TL a)","Returns the natural logarithm of the argument?a. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","log10","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "log10"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE log10(TL a)","Returns the base-10 logarithm of the argument?a. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","log2","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "log2"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE log2(DOUBLE|a)","Returns the base-2 logarithm of the argument?a. 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","log","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "log"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE log(TL base, DOUBLE |DECIMALa)","Returns the base-base?logarithm of the argument?a.?",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","pow","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "pow"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE pow(DOUBLE a, DOUBLE p)","Returns?ap.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","power","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "power"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE power(DOUBLE a, DOUBLE p)","Returns?ap.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sqrt","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sqrt"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE sqrt(DOUBLE a), sqrt(DECIMAL a)","Returns the square root of?a. 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","bin","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "bin"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","STRING bin(BIGINT a)","Returns the number in binary format.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","hex","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "hex"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","STRING hex(BIGINT a) hex(STRING a) hex(BINARY a)","If the argument is an?INT?or?binary,?hex?returns the number as a?STRING?in hexadecimal format. Otherwise if the number is a?STRING, it converts each character into its hexadecimal representation and returns the resulting?STRING.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","unhex","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "unhex"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","BINARY unhex(STRING a)","Inverse of hex. Interprets each pair of characters as a hexadecimal number and converts to the byte representation of the number. 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","conv","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "conv"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","STRING conv(BIGINT num, INT from_base, INT to_base), conv(STRING num, INT from_base, INT to_base)","Converts a number from a given base to another .",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","abs","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "abs"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE abs(DOUBLE a)","Returns the absolute value.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","pmod","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "pmod"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T pmod(T a, T b),","Returns the positive value of?a mod b.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sin","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sin"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE sin(T a)","Returns the sine of?a?(a?is in radians). ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","asin","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "asin"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE asin(T a)","Returns the arc sin of?a?if -1<=a<=1 or NULL otherwise. 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","cos","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "cos"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE cos(T a)","Returns the cosine of?a?(a?is in radians). ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","acos","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "acos"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE acos(T a)","Returns the arccosine of?a?if -1<=a<=1 or NULL otherwise. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","tan","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "tan"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE tan(T a)","Returns the tangent of?a?(a?is in radians). ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","atan","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "atan"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE atan(T a)","Returns the arctangent of?a. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","degrees","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "degrees"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE degrees(T a)","Converts value of?a?from radians to degrees. 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","radians","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "radians"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE radians(T a)","Converts value of?a?from degrees to radians. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","positive","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "positive"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T positive(T a)","Returns?a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","negate","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "negate"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","","Column negate(Column e)","Unary minus.negate the expression.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","negative","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "negative"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T negative(T a)","Returns?-a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sign","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sign"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T sign(T a)","Returns the sign of?a?as '1.0' (if?a?is positive) or '-1.0' (if?a?is negative), '0.0' otherwise. The decimal version returns INT instead of DOUBLE. 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","e","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "e"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE e()","Returns the value of?e.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","pi","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "pi"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE pi()","Returns the value of?pi.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","factorial","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "factorial"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","BIGINT factorial(INT a)","Returns the factorial of?a?. Valid?a?is [0..20].",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","cbrt","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "cbrt"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE cbrt(DOUBLE a)","Returns the cube root of?a?double value?.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","shiftleft","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "shiftleft"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T shiftleft(T a, INT b)","Bitwise left shift. Shifts a b positions to the left. Returns int for tinyint, smallint and int a. 
Returns bigint for bigint a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","shiftright","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "shiftright"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T shiftright(T a, INT b)","Bitwise right shift. Shifts a b positions to the right. Returns int for tinyint, smallint and int a. Returns bigint for bigint a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","shiftrightunsigned","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "shiftrightunsigned"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T shiftrightunsigned(T a, INT b)","Bitwise unsigned right shift. Shifts a b positions to the right. Returns int for tinyint, smallint and int a. Returns bigint for bigint a.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","greatest","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "greatest"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T greatest(T v1, T v2, ...)","Returns the greatest value of the list of values. 
Fixed to return NULL when one or more arguments are NULL, and strict type restriction relaxed, consistent with > operator.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","least","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "least"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T least(T v1, T v2, ...)","Returns the least value of the list of values.?Fixed to return NULL when one or more arguments are NULL, and strict type restriction relaxed, consistent with < operator .",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","width_bucket","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "width_bucket"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","INT width_bucket(NUMERIC expr, NUMERIC min_value, NUMERIC max_value, INT num_buckets)","Returns an integer between 0 and num_buckets+1 by mapping expr into the ith equally sized bucket. Buckets are made by dividing [min_value, max_value] into?equally sized regions. If expr < min_value, return 1, if expr > max_value return num_buckets+1.",now(),"",now()); +-- 日期函数 +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","日期函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="日期函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","last_day","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "last_day"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","",""," Column last_day(Column e)","Given a date column, returns the last day of the month which the given date belongs to.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","dayofyear","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "dayofyear"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column dayofyear(Column e)","Extracts the day of the year as an integer from a given date/timestamp/string.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","from_unixtime","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select 
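+-- The registration pattern used throughout this seed file: each function is
+-- added by three statements: insert the base row, capture its auto-generated
+-- id into a MySQL session variable, then insert the version row that
+-- references it. A minimal sketch of the pattern (the name "my_udf" is a
+-- hypothetical placeholder, not seed data):
+--   INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","my_udf","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+--   select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "my_udf";
+--   INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T my_udf(T a)","What my_udf does.",now(),"",now());
+-- Note that the id lookup filters on create_user and udf_name only, so for a
+-- name registered more than once (e.g. "abs" or "unix_timestamp") the variable
+-- ends up holding the id of the last matching row scanned.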
+-- Date functions(日期函数)
+INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","日期函数","sys","",now(),now(),"udf");
+select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="日期函数" and user_name ="sys" and category = "udf";
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","last_day","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "last_day";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","","Column last_day(Column e)","Given a date column, returns the last day of the month which the given date belongs to.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","dayofyear","1",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "dayofyear";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column dayofyear(Column e)","Extracts the day of the year as an integer from a given date/timestamp/string.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","from_unixtime","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "from_unixtime";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string from_unixtime(bigint unixtime[, string format])","Converts the number of seconds from unix epoch (1970-01-01 00:00:00 UTC) to a string representing the timestamp of that moment in the current system time zone in the format of 1970-01-01 00:00:00.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","unix_timestamp","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "unix_timestamp";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","bigint unix_timestamp()","Gets current Unix timestamp in seconds. This function is not deterministic and its value is not fixed for the scope of a query execution, therefore prevents proper optimization of queries; this has been deprecated since 2.0 in favour of CURRENT_TIMESTAMP.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","unix_timestamp","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "unix_timestamp";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","bigint unix_timestamp(string date)","Converts time string in format yyyy-MM-dd HH:mm:ss to Unix timestamp (in seconds), using the default timezone and the default locale, return 0 if fail: unix_timestamp('2009-03-20 11:30:01') = 1237573801.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","unix_timestamp","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "unix_timestamp";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","bigint unix_timestamp(string date, string pattern)","Convert time string with given pattern to Unix time stamp (in seconds), return 0 if fail: unix_timestamp('2009-03-20', 'yyyy-MM-dd') = 1237532400.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","to_date","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "to_date";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string to_date(string timestamp)","Returns the date part of a timestamp string: to_date('1970-01-01 00:00:00') = '1970-01-01'. Returns a date object.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","year","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "year";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int year(string date)","Returns the year part of a date or a timestamp string: year('1970-01-01 00:00:00') = 1970, year('1970-01-01') = 1970.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","quarter","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "quarter";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int quarter(date/timestamp/string)","Returns the quarter of the year for a date, timestamp, or string in the range 1 to 4. Example: quarter('2015-04-08') = 2.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","month","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "month";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int month(string date)","Returns the month part of a date or a timestamp string: month('1970-11-01 00:00:00') = 11, month('1970-11-01') = 11.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","day","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "day";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int day(string date)","Returns the day part of a date or a timestamp string: day('1970-11-01 00:00:00') = 1, day('1970-11-01') = 1.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","dayofmonth","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "dayofmonth";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int dayofmonth(date)","Returns the day part of a date or a timestamp string: dayofmonth('1970-11-01 00:00:00') = 1, dayofmonth('1970-11-01') = 1.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","hour","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "hour";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int hour(string date)","Returns the hour of the timestamp: hour('2009-07-30 12:58:59') = 12, hour('12:58:59') = 12.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","minute","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "minute";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int minute(string date)","Returns the minute of the timestamp.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","second","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "second";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int second(string date)","Returns the second of the timestamp.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","weekofyear","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "weekofyear";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int weekofyear(string date)","Returns the week number of a timestamp string: weekofyear('1970-11-01 00:00:00') = 44, weekofyear('1970-11-01') = 44.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","extract","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "extract";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int extract(field FROM source)","Retrieve fields such as days or hours from source. Source must be a date, timestamp, interval or a string that can be converted into either a date or timestamp. Supported fields include: day, dayofweek, hour, minute, month, quarter, second, week and year.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","datediff","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "datediff";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int datediff(string enddate, string startdate)","Returns the number of days from startdate to enddate: datediff('2009-03-01', '2009-02-27') = 2.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","date_add","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "date_add";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string date_add(date/timestamp/string startdate, tinyint/smallint/int days)","Adds a number of days to startdate: date_add('2008-12-31', 1) = '2009-01-01'.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","date_sub","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "date_sub";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string date_sub(date/timestamp/string startdate, tinyint/smallint/int days)","Subtracts a number of days from startdate: date_sub('2008-12-31', 1) = '2008-12-30'.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","from_utc_timestamp","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "from_utc_timestamp";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","timestamp from_utc_timestamp({any primitive type} ts, string timezone)","Converts a timestamp* in UTC to a given timezone.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","to_utc_timestamp","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "to_utc_timestamp";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","timestamp to_utc_timestamp({any primitive type} ts, string timezone)","Converts a timestamp* in a given timezone to UTC.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","current_date","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "current_date";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","date current_date","Returns the current date at the start of query evaluation. All calls of current_date within the same query return the same value.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","current_timestamp","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "current_timestamp";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","timestamp current_timestamp","Returns the current timestamp at the start of query evaluation. All calls of current_timestamp within the same query return the same value.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","add_months","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "add_months";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string add_months(string start_date, int num_months, output_date_format)","Returns the date that is num_months after start_date. start_date is a string, date or timestamp. num_months is an integer. If start_date is the last day of the month or if the resulting month has fewer days than the day component of start_date, then the r",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","last_day","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "last_day";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string last_day(string date)","Returns the last day of the month which the date belongs to. date is a string in the format 'yyyy-MM-dd HH:mm:ss' or 'yyyy-MM-dd'. The time part of date is ignored.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","next_day","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "next_day";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string next_day(string start_date, string day_of_week)","Returns the first date which is later than start_date and named as day_of_week. start_date is a string/date/timestamp. day_of_week is 2 letters, 3 letters or full name of the day of the week (e.g. Mo, tue, FRIDAY). The time part of start_date is ignored.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","trunc","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "trunc";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string trunc(string date, string format)","Returns date truncated to the unit specified by the format. Supported formats: MONTH/MON/MM, YEAR/YYYY/YY. Example: trunc('2015-03-17', 'MM') = 2015-03-01.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","months_between","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "months_between";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double months_between(date1, date2)","Returns number of months between dates date1 and date2. If date1 is later than date2, then the result is positive. If date1 is earlier than date2, then the result is negative. If date1 and date2 are either the same days of the month or both last days of ",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","date_format","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "date_format";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string date_format(date/timestamp/string ts, string fmt)","Converts a date/timestamp/string to a value of string in the format specified by the date format fmt. Supported formats are Java SimpleDateFormat formats. The second argument fmt should be constant. Example: date_format('2015-04-08', 'y') = '2015'.",now(),"",now());
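+-- Each category section above re-binds @ps_udf_tree_id to its own
+-- linkis_ps_udf_tree row before the function rows that follow. After loading,
+-- a quick sanity check (a manual sketch, not part of the seed data) can join
+-- the three tables per category:
+--   SELECT t.name AS category, b.udf_name, v.use_format
+--   FROM linkis_ps_udf_tree t
+--   JOIN linkis_ps_udf_baseinfo b ON b.tree_id = t.id
+--   JOIN linkis_ps_udf_version v ON v.udf_id = b.id
+--   WHERE t.user_name = "sys" AND t.category = "udf";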
+-- Aggregate functions(聚合函数)
+INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","聚合函数","sys","",now(),now(),"udf");
+select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="聚合函数" and user_name ="sys" and category = "udf";
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sum","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sum";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","","Column sum(Column e)","Returns the sum of all values in the expression.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","skewness","1",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "skewness";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column skewness(Column e)","Returns the skewness of the values in a group.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","stddev","1",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "stddev";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column stddev(Column e)","Alias for stddev_samp.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sumDistinct","1",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sumDistinct";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column sumDistinct(Column e)","Returns the sum of distinct values in the expression.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","countDistinct","1",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "countDistinct";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column countDistinct(String columnName, String... columnNames)","Returns the number of distinct items in a group.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","first","1",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "first";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column first(Column e)","Returns the first value in a group. The function by default returns the first values it sees. It will return the first non-null value it sees when ignoreNulls is set to true. If all values are null, then null is returned.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","grouping_id","1",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "grouping_id";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column grouping_id(String colName, scala.collection.Seq colNames)","Returns the level of grouping.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","grouping","1",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "grouping";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column grouping(String columnName)","Indicates whether a specified column in a GROUP BY list is aggregated or not; returns 1 for aggregated or 0 for not aggregated in the result set.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","kurtosis","1",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "kurtosis";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column kurtosis(Column e)","Returns the kurtosis of the values in a group.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","last","1",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "last";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column last(Column e, boolean ignoreNulls)","Returns the last value in a group. The function by default returns the last values it sees. It will return the last non-null value it sees when ignoreNulls is set to true. If all values are null, then null is returned.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","mean","1",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "mean";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column mean(String columnName)","Returns the average of the values in a group.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","count","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "count";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","BIGINT count([DISTINCT] col)","count(*) - Returns the total number of retrieved rows, including rows containing NULL values. count(expr) - Returns the number of rows for which the supplied expression is non-NULL. count(DISTINCT expr[, expr]) - Returns the number of rows for which the s",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sum","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sum";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE sum(col), sum(DISTINCT col)","Returns the sum of the elements in the group or the sum of the distinct values of the column in the group.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","avg","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "avg";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE avg(col), avg(DISTINCT col)","Returns the average of the elements in the group or the average of the distinct values of the column in the group.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","min","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "min";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE min(col)","Returns the minimum of the column in the group.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","max","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "max";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE max(col)","Returns the maximum value of the column in the group.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","variance","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "variance";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE variance(col)","Returns the variance of a numeric column in the group.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","var_pop","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "var_pop";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE var_pop(col)","Returns the variance of a numeric column in the group.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","var_samp","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "var_samp";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE var_samp(col)","Returns the unbiased sample variance of a numeric column in the group.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","stddev_pop","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "stddev_pop";
+INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE stddev_pop(col)","Returns the standard deviation of a numeric column in the group.",now(),"",now());
+INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","stddev_samp","0",@ps_udf_tree_id,now(),now(),"IDE","all");
+select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "stddev_samp";
+INSERT INTO linkis_ps_udf_version
(udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE stddev_samp(col)","Returns the unbiased sample standard deviation of a numeric column in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","covar_pop","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "covar_pop"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE covar_pop(col1, col2)","Returns the population covariance of a pair of numeric columns in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","covar_samp","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "covar_samp"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE covar_samp(col1, col2)","Returns the sample covariance of a pair of numeric columns in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","corr","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "corr"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE corr(col1, col2)","Returns the Pearson coefficient of correlation of a pair of numeric columns in the group.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","percentile","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "percentile"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE percentile(BIGINT col, p)","Returns the exact pth percentile of a column in the group (does not work with floating point types). p must be between 0 and 1. NOTE: A true percentile can only be computed for integer values.
Use PERCENTILE_APPROX if your input is non-integral.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","percentile","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "percentile"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array percentile(BIGINT col, array(p1[, p2]...))","Returns the exact percentiles p1, p2, ... of a column in the group (does not work with floating point types). pi must be between 0 and 1. NOTE: A true percentile can only be computed for integer values. Use PERCENTILE_APPROX if your input is non-integral.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","percentile_approx","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "percentile_approx"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","DOUBLE percentile_approx(DOUBLE col, p [, B])","Returns an approximate pth percentile of a numeric column (including floating point types) in the group. The B parameter controls approximation accuracy at the cost of memory. Higher values yield better approximations, and the default is 10,000. When the ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","percentile_approx","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "percentile_approx"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array percentile_approx(DOUBLE col, array(p1[, p2]...)
[, B])","Same as above, but accepts and returns an array of percentile values instead of a single one.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_avgx","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_avgx"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_avgx(independent, dependent)","Equivalent to avg(dependent).",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_avgy","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_avgy"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_avgy(independent, dependent)","Equivalent to avg(independent).",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_count","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_count"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_count(independent, dependent)","Returns the number of non-null pairs used to fit the linear regression line.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_intercept","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_intercept"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_intercept(independent, dependent)","Returns the y-intercept of the linear regression line, i.e.
the value of b in the equation dependent = a * independent + b.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_r2","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_r2"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_r2(independent, dependent)","Returns the coefficient of determination for the regression.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_slope","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_slope"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_slope(independent, dependent)","Returns the slope of the linear regression line, i.e. the value of a in the equation dependent = a * independent + b.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_sxx","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_sxx"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_sxx(independent, dependent)","Equivalent to regr_count(independent, dependent) * var_pop(dependent).",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_sxy","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_sxy"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_sxy(independent, dependent)","Equivalent to regr_count(independent, dependent) * covar_pop(independent, dependent).",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","regr_syy","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "regr_syy"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","double regr_syy(independent, dependent)","Equivalent to regr_count(independent, dependent) * var_pop(independent).",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES
("sys","histogram_numeric","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "histogram_numeric"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array histogram_numeric(col, b)","Computes a histogram of a numeric column in the group using b non-uniformly spaced bins. The output is an array of size b of double-valued (x,y) coordinates that represent the bin centers and heights",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","collect_set","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "collect_set"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array collect_set(col)","Returns a set of objects with duplicate elements eliminated.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","collect_list","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "collect_list"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array collect_list(col)","Returns a list of objects with duplicates. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ntile","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ntile"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","INTEGER ntile(INTEGER x)","Divides an ordered partition into x groups called buckets and assigns a bucket number to each row in the partition. This allows easy calculation of tertiles, quartiles, deciles, percentiles and other common summary statistics.
",now(),"",now()); +-- Conditional functions (条件判断函数) +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","条件判断函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="条件判断函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","not","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "not"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column not(Column e)","Inversion of boolean expression,",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","when","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "when"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column when(Column condition,Object value)","Evaluates a list of conditions and returns one of multiple possible result expressions. If otherwise is not defined at the end, null is returned for unmatched conditions.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","isnan","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "isnan"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column isnan(Column e)","Return true iff the column is NaN.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","nanvl","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "nanvl"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column nanvl(Column col1, Column col2)","Returns col1 if it is not NaN, or col2 if col1 is NaN. Both inputs should be floating point columns (DoubleType or FloatType).",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","point","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "point"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","","columns (DoubleType or FloatType)","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo
(create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","if","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "if"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T if(boolean testCondition, T valueTrue, T valueFalseOrNull)","Returns valueTrue when testCondition is true, returns valueFalseOrNull otherwise.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","isnull","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "isnull"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","boolean isnull( a )","Returns true if a is NULL and false otherwise.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","isnotnull","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "isnotnull"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","boolean isnotnull ( a )","Returns true if a is not NULL and false otherwise.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","nvl","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "nvl"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T nvl(T value, T default_value)","Returns default value if value is null else returns value.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","coalesce","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "coalesce"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T coalesce(T v1, T v2, ...)","Returns the first v that is not NULL, or NULL if all v's are NULL.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","nullif","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "nullif"; +INSERT INTO linkis_ps_udf_version
(udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T nullif( a, b )","Returns NULL if a=b; otherwise returns a. Shorthand for: CASE WHEN a = b then NULL else a",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","assert_true","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "assert_true"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","void assert_true(boolean condition)","Throw an exception if 'condition' is not true, otherwise return null. For example, select assert_true (2<1).",now(),"",now()); +-- Type conversion functions (类型转换函数) +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","类型转换函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="类型转换函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","binary","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "binary"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","binary binary(string|binary)","Casts the parameter into a binary.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","cast","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "cast"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","cast(expr as <type>)","Converts the results of the expression expr to <type>. For example, cast('1' as BIGINT) will convert the string '1' to its integral representation. A null is returned if the conversion does not succeed.
If cast(expr as boolean) Hive returns true for a non",now(),"",now()); +-- Collection operation functions (集合操作函数) +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","集合操作函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="集合操作函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","struct","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "struct"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column struct(scala.collection.Seq cols)","Creates a new struct column. If the input column is a column in a DataFrame, or a derived column expression that is named (i.e. aliased), its name would be retained as the StructField's name, otherwise, the newly generated StructField's name would be auto",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","col","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "col"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column col(String colName)","Returns a Column based on the given column name.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","column","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "column"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column column(String colName)","Returns a Column based on the given column name.
Alias of col.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","desc_nulls_first","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "desc_nulls_first"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column desc_nulls_first(String columnName)","Returns a sort expression based on the descending order of the column, and null values appear before non-null values.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","desc_nulls_last","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "desc_nulls_last"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column desc_nulls_last(String columnName)","Returns a sort expression based on the descending order of the column, and null values appear after non-null values.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","desc","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "desc"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column desc(String columnName)","Returns a sort expression based on the descending order of the column.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","array","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "array"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array(val1, val2, ...)","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","map","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "map"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","map(key1, value1, ...)","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","size","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "size"; +INSERT INTO 
linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int size(Map|Array a)","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","map_keys","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "map_keys"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array map_keys(Map)","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","map_values","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "map_values"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array map_values(Map)","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","array_contains","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "array_contains"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","boolean array_contains(Array, value)","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sort_array","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sort_array"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","array sort_array(Array)","",now(),"",now()); +-- Data encryption functions (数据加密函数) +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","数据加密函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="数据加密函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","base64","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "base64"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column base64(Column e)","Computes the BASE64 encoding of a binary column and returns it as a string column.
This is the reverse of unbase64.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","mask","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "mask"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string mask(string str[, string upper[, string lower[, string number]]])","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","mask_first_n","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "mask_first_n"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string mask_first_n(string str[, int n])","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","mask_last_n","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "mask_last_n"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string mask_last_n(string str[, int n])","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","mask_show_first_n","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "mask_show_first_n"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string mask_show_first_n(string str[, int n])","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","mask_show_last_n","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "mask_show_last_n"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string mask_show_last_n(string str[, int n])","",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","mask_hash","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "mask_hash"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES 
(@ps_udf_baseinfo_id,"file://","","","","None","string mask_hash(string|char|varchar str)","",now(),"",now()); +-- Table-generating functions (生成表函数) +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","生成表函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="生成表函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","explode","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "explode"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T explode(ARRAY<T>|MAP<K,V> a)","Explodes an array or map to multiple rows. Returns a row-set with a single column (col), one row for each element from the array, or a row-set with two columns (key,value), one row for each key-value pair from the input map",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","posexplode","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "posexplode"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int,T posexplode(ARRAY<T> a)","Explodes an array to multiple rows with additional positional column of int type (position of items in the original array, starting with 0). Returns a row-set with two columns (pos,val), one row for each element from the array.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","inline","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "inline"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T1,...,Tn inline(ARRAY<STRUCT<f1:T1,...,fn:Tn>> a)","Explodes an array of structs to multiple rows. Returns a row-set with N columns (N = number of top level elements in the struct), one row per struct from the array. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","stack","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "stack"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","T1,...,Tn/r stack(int r, T1 V1, ..., Tn/r Vn)","Breaks up n values V1,...,Vn into r rows.
Each row will have n/r columns. r must be constant.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","json_tuple","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "json_tuple"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string1,...,stringn json_tuple(string jsonStr,string k1,...,string kn)","Takes JSON string and a set of n keys, and returns a tuple of n values. This is a more efficient version of the get_json_object UDF because it can get multiple keys with just one call.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","parse_url_tuple","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "parse_url_tuple"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string1,...,stringn parse_url_tuple(string urlStr,string p1,...,string pn)","Takes URL string and a set of n URL parts, and returns a tuple of n values. This is similar to the parse_url() UDF but can extract multiple parts at once out of a URL. Valid part names are: HOST, PATH, QUERY, REF, PROTOCOL, AUTHORITY, FILE, USERINFO, QUER",now(),"",now()); +-- Analytic window functions (分析窗口函数) +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","分析窗口函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="分析窗口函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","dense_rank","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "dense_rank"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," Column dense_rank()","returns the rank of rows within a window partition, without any gaps.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","dense_rank ","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "dense_rank "; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","dense_rank ( ) OVER ( [query_partition_clause] order_by_clause )","Returns an ascending sequence of integers, starting with 1.
The output sequence produces duplicate integers for duplicate values of the ORDER BY expressions.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","first_value","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "first_value"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," first_value(expr) OVER([partition_by_clause] order_by_clause [window_clause])","Returns the expression value from the first row in the window. The return value is NULL if the input expression is NULL.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","lag","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "lag"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," lag(expr [, offset] [, default]) OVER ([partition_by_clause] order_by_clause)","This function returns the value of an expression using column values from a preceding row. You specify an integer offset, which designates a row position some number of rows previous to the current row. Any column references in the expression argument ref",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","last_value","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "last_value"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," last_value(expr) OVER([partition_by_clause] order_by_clause [window_clause])","Returns the expression value from the last row in the window. The return value is NULL if the input expression is NULL.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","lead","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "lead"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," lead(expr [, offset] [, default]) OVER([partition_by_clause] order_by_clause)","This function returns the value of an expression using column values from a following row. You specify an integer offset, which designates a row position some number of rows after the current row.
Any column references in the expression argument refer ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","ntile","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "ntile"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," ntile(int n) OVER([partition_by_clause] order_by_clause)","Splits the ordered rows of each group into n slices and returns the slice number of the current row",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","percent_rank","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "percent_rank"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," percent_rank() OVER([partition_by_clause] order_by_clause)","(RANK of the current row within the group - 1) / (total number of rows in the group - 1)",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","cume_dist","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "cume_dist"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," cume_dist() OVER([partition_by_clause] order_by_clause)","Number of rows less than or equal to the current value / total number of rows in the group",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","rank","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "rank"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," rank() OVER([partition_by_clause] order_by_clause)","Returns an ascending sequence of integers, starting with 1. The output sequence produces duplicate integers for duplicate values of the ORDER BY expressions. After generating duplicate output values for the tied input values, the function increments the s",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","row_number","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "row_number"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," row_number() OVER([partition_by_clause] order_by_clause)","Returns an ascending sequence of integers, starting with 1. Starts the sequence over for each group produced by the PARTITIONED BY clause.
The output sequence includes different values for duplicate input values. Therefore, the sequence never contains any",now(),"",now()); +-- 其它函数 +INSERT INTO linkis_ps_udf_tree (parent,name,user_name,description,create_time,update_time,category) VALUES ("1","其它函数","sys","",now(),now(),"udf"); +select @ps_udf_tree_id := id from linkis_ps_udf_tree where name ="其它函数" and user_name ="sys" and category = "udf"; +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","spark_partition_id","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "spark_partition_id"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column spark_partition_id()","returns partition ID.This is indeterministic because it depends on data partitioning and task scheduling.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","to_json","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "to_json"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column to_json(Column e,scala.collection.immutable.Map options)","(Scala-specific) Converts a column containing a StructType into a JSON string with the specified schema. Throws an exception, in the case of an unsupported type.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","window","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "window"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column window(Column timeColumn, String windowDuration, String slideDuration)","Bucketize rows into one or more time windows given a timestamp specifying column. Window starts are inclusive but the window ends are exclusive, e.g. 12:05 will be in the window [12:05,12:10) but not in [12:00,12:05). Windows can support microsecond preci",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","approxCountDistinct","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "approxCountDistinct"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column approxCountDistinct(Column e)"," Use approx_count_distinct. 
",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","asc_nulls_first","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "asc_nulls_first"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column asc_nulls_first(String columnName)","Returns a sort expression based on ascending order of the column, and null values return before non-null values.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","asc_nulls_last","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "asc_nulls_last"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column asc_nulls_last(String columnName)","Returns a sort expression based on ascending order of the column, and null values appear after non-null values.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","asc","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "asc"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column asc(String columnName)","Returns a sort expression based on ascending order of the column.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","bitwiseNOT","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "bitwiseNOT"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column expr(String expr)","Parses the expression string into the column that it represents, similar to DataFrame.selectExpr",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","broadcast","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "broadcast"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," Dataset broadcast(Dataset df)","Marks a DataFrame as small enough for use in broadcast joins.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES 
("sys","callUDF","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "callUDF"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column callUDF(String udfName, scala.collection.Seq cols)","Call an user-defined function.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","from_json","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "from_json"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","Column from_json(Column e,StructType schema,scala.collection.immutable.Map options)","(Scala-specific) Parses a column containing a JSON string into a StructType with the specified schema. Returns null, in the case of an unparseable string.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","lit","1",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "lit"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None"," Column lit(Object literal)","Creates a Column of literal value.The passed in object is returned directly if it is already a Column. If the object is a Scala Symbol, it is converted into a Column also. Otherwise, a new Column is created to represent the literal value.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","md5","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "md5"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string md5(string/binary)","Calculates an MD5 128-bit checksum for the string or binary . The value is returned as a string of 32 hex digits, or NULL if the argument was NULL. Example: md5('ABC') = '902fbdd2b1df0c4f70b4a5d23525e932'.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sha1","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sha1"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string sha1(string/binary)","Calculates the SHA-1 digest for string or binary and returns the value as a hex string . 
Example: sha1('ABC') = '3c01bdbb26f358bab27f267924aa2c9a03fcfdb8'.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sha","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sha"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string sha(string/binary)","Calculates the SHA-1 digest for string or binary and returns the value as a hex string . Example: sha1('ABC') = '3c01bdbb26f358bab27f267924aa2c9a03fcfdb8'.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","sha2","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "sha2"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string sha2(string/binary, int)","Calculates the SHA-2 family of hash functions (SHA-224, SHA-256, SHA-384, and SHA-512) . The first argument is the string or binary to be hashed. The second argument indicates the desired bit length of the result, which must have a value of 224, 256, 384,",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","reflect","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "reflect"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","varies reflect(class, method[, arg1[, arg2..]])","Calls a Java method by matching the argument signature, using reflection. ",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","crc32","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "crc32"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","bigint crc32(string/binary)","Computes a cyclic redundancy check value for string or binary argument and returns bigint value . 
Example: crc32('ABC') = 2743272264.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","aes_decrypt","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "aes_decrypt"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","binary aes_decrypt(input binary, key string/binary)","Decrypt input using AES . Key lengths of 128, 192 or 256 bits can be used. 192 and 256 bits keys can be used if Java Cryptography Extension (JCE) Unlimited Strength Jurisdiction Policy Files are installed. If either argument is NULL or the key length is n",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","aes_encrypt","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "aes_encrypt"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","binary aes_encrypt(input string/binary, key string/binary)","Encrypt input using AES . Key lengths of 128, 192 or 256 bits can be used. 192 and 256 bits keys can be used if Java Cryptography Extension (JCE) Unlimited Strength Jurisdiction Policy Files are installed. If either argument is NULL or the key length is n",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","hash","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "hash"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","int hash(a1[, a2...])","Returns a hash value of the arguments.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","current_database","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "current_database"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string current_database()","Returns current database name .",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","current_user","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "current_user"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string 
current_user()","Returns current user name from the configured authenticator manager?. Could be the same as the user provided when connecting, but with some authentication managers (for example HadoopDefaultAuthenticator) it could be different.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","logged_in_user","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "logged_in_user"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string logged_in_user()","Returns current user name from the session state. This is the username provided when connecting to Hive.",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","version","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "version"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","string version()","Returns the Hive version. The string contains 2 fields, the first being a build number and the second being a build hash. Example: select version(); might return 2.1.0.2.5.0.0-1245 r027527b9c5ce1a3d7d0b6d2e6de2378fb0c39232. Actual results will depend on y",now(),"",now()); +INSERT INTO linkis_ps_udf_baseinfo (create_user,udf_name,udf_type,tree_id,create_time,update_time,sys,cluster_name) VALUES ("sys","java_method","0",@ps_udf_tree_id,now(),now(),"IDE","all"); +select @ps_udf_baseinfo_id := id from linkis_ps_udf_baseinfo where create_user = "sys" and udf_name = "java_method"; +INSERT INTO linkis_ps_udf_version (udf_id,path,bml_resource_id,bml_resource_version,is_published,register_format,use_format,description,create_time,md5,update_time) VALUES (@ps_udf_baseinfo_id,"file://","","","","None","varies java_method(class, method[, arg1[, arg2..]])","Synonym for?reflect. ",now(),"",now()); + + + + + + + diff --git a/linkis-dist/package/db/upgrade/1.5.0_schema/mysql/linkis_ddl.sql b/linkis-dist/package/db/upgrade/1.5.0_schema/mysql/linkis_ddl.sql index 6d78b7b670..19c23b116c 100644 --- a/linkis-dist/package/db/upgrade/1.5.0_schema/mysql/linkis_ddl.sql +++ b/linkis-dist/package/db/upgrade/1.5.0_schema/mysql/linkis_ddl.sql @@ -15,4 +15,72 @@ * limitations under the License. 
*/ -ALTER TABLE `linkis_cg_manager_label` MODIFY COLUMN label_key varchar(50); \ No newline at end of file +ALTER TABLE `linkis_cg_manager_label` MODIFY COLUMN label_key varchar(50); +ALTER TABLE linkis_ps_udf_user_load ADD CONSTRAINT uniq_uid_uname UNIQUE (`udf_id`, `user_name`); +ALTER TABLE linkis_ps_bml_resources ADD CONSTRAINT uniq_rid_eflag UNIQUE (`resource_id`, `enable_flag`); + + +ALTER TABLE linkis_ps_configuration_config_key ADD UNIQUE uniq_key_ectype (`key`,`engine_conn_type`); + +ALTER TABLE linkis_ps_configuration_config_key modify column engine_conn_type varchar(50) DEFAULT '' COMMENT 'engine type, such as spark, hive etc'; + +ALTER TABLE linkis_ps_common_lock ADD COLUMN locker VARCHAR(255) NOT NULL COMMENT 'locker'; + +ALTER TABLE linkis_ps_configuration_config_key ADD column template_required tinyint(1) DEFAULT 0 COMMENT 'template required 0 none / 1 must'; +ALTER TABLE linkis_ps_configuration_config_key ADD column `boundary_type` int(2) NOT NULL COMMENT '0 none / 1 with min / 2 with max / 3 min and max both'; +ALTER TABLE linkis_ps_configuration_config_value modify COLUMN config_value varchar(500); + +DROP TABLE IF EXISTS `linkis_ps_configuration_template_config_key`; +CREATE TABLE IF NOT EXISTS `linkis_ps_configuration_template_config_key` ( + `id` BIGINT(20) NOT NULL AUTO_INCREMENT, + `template_name` VARCHAR(200) NOT NULL COMMENT 'Configuration template name (stored redundantly)', + `template_uuid` VARCHAR(36) NOT NULL COMMENT 'uuid template id recorded by the third party', + `key_id` BIGINT(20) NOT NULL COMMENT 'id of linkis_ps_configuration_config_key', + `config_value` VARCHAR(200) NULL DEFAULT NULL COMMENT 'configuration value', + `max_value` VARCHAR(50) NULL DEFAULT NULL COMMENT 'upper limit value', + `min_value` VARCHAR(50) NULL DEFAULT NULL COMMENT 'Lower limit value (reserved)', + `validate_range` VARCHAR(50) NULL DEFAULT NULL COMMENT 'Validation regex (reserved)', + `is_valid` VARCHAR(2) DEFAULT 'Y' COMMENT 'Is it valid? Reserved Y/N', + `create_by` VARCHAR(50) NOT NULL COMMENT 'Creator', + `create_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', + `update_by` VARCHAR(50) NULL DEFAULT NULL COMMENT 'Update by', + `update_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'update time', + PRIMARY KEY (`id`), + UNIQUE INDEX `uniq_tid_kid` (`template_uuid`, `key_id`), + UNIQUE INDEX `uniq_tname_kid` (`template_name`, `key_id`) + )ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; + +DROP TABLE IF EXISTS `linkis_ps_configuration_key_limit_for_user`; +CREATE TABLE IF NOT EXISTS `linkis_ps_configuration_key_limit_for_user` ( + `id` BIGINT(20) NOT NULL AUTO_INCREMENT, + `user_name` VARCHAR(50) NOT NULL COMMENT 'username', + `combined_label_value` VARCHAR(128) NOT NULL COMMENT 'Combined label combined_userCreator_engineType such as hadoop-IDE,spark-2.4.3', + `key_id` BIGINT(20) NOT NULL COMMENT 'id of linkis_ps_configuration_config_key', + `config_value` VARCHAR(200) NULL DEFAULT NULL COMMENT 'configuration value', + `max_value` VARCHAR(50) NULL DEFAULT NULL COMMENT 'upper limit value', + `min_value` VARCHAR(50) NULL DEFAULT NULL COMMENT 'Lower limit value (reserved)', + `latest_update_template_uuid` VARCHAR(36) NOT NULL COMMENT 'uuid template id recorded by the third party', + `is_valid` VARCHAR(2) DEFAULT 'Y' COMMENT 'Is it valid? 
Reserved Y/N', + `create_by` VARCHAR(50) NOT NULL COMMENT 'Creator', + `create_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', + `update_by` VARCHAR(50) NULL DEFAULT NULL COMMENT 'Update by', + `update_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'update time', + PRIMARY KEY (`id`), + UNIQUE INDEX `uniq_com_label_kid` (`combined_label_value`, `key_id`) + )ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; + +DROP TABLE IF EXISTS `linkis_ps_configutation_lm_across_cluster_rule`; +CREATE TABLE IF NOT EXISTS linkis_ps_configutation_lm_across_cluster_rule ( + id INT AUTO_INCREMENT COMMENT 'Rule ID, auto-increment primary key', + cluster_name char(32) NOT NULL COMMENT 'Cluster name, cannot be empty', + creator char(32) NOT NULL COMMENT 'Creator, cannot be empty', + username char(32) NOT NULL COMMENT 'User, cannot be empty', + create_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Creation time, cannot be empty', + create_by char(32) NOT NULL COMMENT 'Creator, cannot be empty', + update_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Modification time, cannot be empty', + update_by char(32) NOT NULL COMMENT 'Updater, cannot be empty', + rules varchar(256) NOT NULL COMMENT 'Rule content, cannot be empty', + is_valid VARCHAR(2) DEFAULT 'N' COMMENT 'Is it valid Y/N', + PRIMARY KEY (id), + UNIQUE KEY idx_creator_username (creator, username) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-configuration/pom.xml b/linkis-public-enhancements/linkis-configuration/pom.xml index fc7faea35c..45a1ee37d9 100644 --- a/linkis-public-enhancements/linkis-configuration/pom.xml +++ b/linkis-public-enhancements/linkis-configuration/pom.xml @@ -59,6 +59,13 @@ linkis-computation-client ${project.version} + + + org.instancio + instancio-junit + 2.16.1 + test + diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/conf/AcrossClusterRuleKeys.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/conf/AcrossClusterRuleKeys.java new file mode 100644 index 0000000000..f2fee2ff1f --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/conf/AcrossClusterRuleKeys.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.configuration.conf; + +public class AcrossClusterRuleKeys { + + public static final String KEY_QUEUE_SUFFIX = "suffix"; + + public static final String KEY_ACROSS_CLUSTER_QUEUE_SUFFIX = "bdap2bdp"; + + public static final String KEY_START_TIME = "startTime"; + + public static final String KEY_END_TIME = "endTime"; + + public static final String KEY_CPU_THRESHOLD = "CPUThreshold"; + + public static final String KEY_MEMORY_THRESHOLD = "MemoryThreshold"; + + public static final String KEY_CPU_PERCENTAGE_THRESHOLD = "CPUPercentageThreshold"; + + public static final String KEY_MEMORY_PERCENTAGE_THRESHOLD = "MemoryPercentageThreshold"; + + public static final String KEY_QUEUE_RULE = "queueRule"; + + public static final String KEY_TIME_RULE = "timeRule"; + + public static final String KEY_THRESHOLD_RULE = "thresholdRule"; +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/AcrossClusterRuleMapper.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/AcrossClusterRuleMapper.java new file mode 100644 index 0000000000..9dadcf918c --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/AcrossClusterRuleMapper.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
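The constants in AcrossClusterRuleKeys above are the field names of the rules JSON document persisted in the linkis_ps_configutation_lm_across_cluster_rule.rules column. A minimal sketch of how a caller might assemble that document; the Jackson serialization and the example threshold values are assumptions for illustration, not part of this patch:

import java.util.HashMap;
import java.util.Map;

import com.fasterxml.jackson.databind.ObjectMapper;

import static org.apache.linkis.configuration.conf.AcrossClusterRuleKeys.*;

public class AcrossClusterRulesSketch {

  // Assembles the nested rules document persisted in the rules column.
  // Key names come from AcrossClusterRuleKeys; the values here are made up.
  public static String buildRules() throws Exception {
    Map<String, Object> queueRule = new HashMap<>();
    queueRule.put(KEY_QUEUE_SUFFIX, KEY_ACROSS_CLUSTER_QUEUE_SUFFIX);

    Map<String, Object> timeRule = new HashMap<>();
    timeRule.put(KEY_START_TIME, "00:00");
    timeRule.put(KEY_END_TIME, "08:00");

    Map<String, Object> thresholdRule = new HashMap<>();
    thresholdRule.put(KEY_CPU_THRESHOLD, "1000");
    thresholdRule.put(KEY_MEMORY_THRESHOLD, "2048");
    thresholdRule.put(KEY_CPU_PERCENTAGE_THRESHOLD, "0.8");
    thresholdRule.put(KEY_MEMORY_PERCENTAGE_THRESHOLD, "0.8");

    Map<String, Object> rules = new HashMap<>();
    rules.put(KEY_QUEUE_RULE, queueRule);
    rules.put(KEY_TIME_RULE, timeRule);
    rules.put(KEY_THRESHOLD_RULE, thresholdRule);

    // Must stay within the varchar(256) rules column defined in the DDL.
    return new ObjectMapper().writeValueAsString(rules);
  }
}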
+ */ + +package org.apache.linkis.configuration.dao; + +import org.apache.linkis.configuration.entity.AcrossClusterRule; + +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +public interface AcrossClusterRuleMapper { + + AcrossClusterRule getAcrossClusterRule(@Param("id") Long id); + + void deleteAcrossClusterRule( + @Param("creator") String creator, @Param("username") String username); + + void updateAcrossClusterRule(@Param("acrossClusterRule") AcrossClusterRule acrossClusterRule); + + void insertAcrossClusterRule(@Param("acrossClusterRule") AcrossClusterRule acrossClusterRule); + + List<AcrossClusterRule> queryAcrossClusterRuleList( + @Param("username") String username, + @Param("creator") String creator, + @Param("clusterName") String clusterName); + + void validAcrossClusterRule(@Param("isValid") String isValid, @Param("id") Long id); +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigKeyLimitForUserMapper.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigKeyLimitForUserMapper.java new file mode 100644 index 0000000000..0993b2cbed --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigKeyLimitForUserMapper.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
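AcrossClusterRuleMapper is a plain MyBatis mapper, so outside of Spring it can be exercised directly from a SqlSession. A hedged usage sketch, assuming a SqlSessionFactory with this mapper registered and an insert statement configured with useGeneratedKeys so the generated id comes back populated:

import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;

import org.apache.linkis.configuration.dao.AcrossClusterRuleMapper;
import org.apache.linkis.configuration.entity.AcrossClusterRule;

public class AcrossClusterRuleDaoSketch {

  public static void insertThenEnable(SqlSessionFactory factory) {
    try (SqlSession session = factory.openSession(true)) { // auto-commit
      AcrossClusterRuleMapper mapper = session.getMapper(AcrossClusterRuleMapper.class);

      AcrossClusterRule rule = new AcrossClusterRule();
      rule.setClusterName("bdp"); // illustrative values
      rule.setCreator("IDE");
      rule.setUsername("hadoop");
      rule.setRules("{}");
      rule.setIsValid("N"); // rows default to invalid per the DDL

      // Assumes the mapped insert uses useGeneratedKeys so rule.getId() is set.
      mapper.insertAcrossClusterRule(rule);

      // Flip the is_valid flag once the rule has been reviewed.
      mapper.validAcrossClusterRule("Y", rule.getId());
    }
  }
}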
+ */ + +package org.apache.linkis.configuration.dao; + +import org.apache.linkis.configuration.entity.ConfigKeyLimitForUser; +import org.apache.linkis.configuration.entity.ConfigKeyLimitVo; + +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +/** Mapper for table linkis_ps_configuration_key_limit_for_user. */ +public interface ConfigKeyLimitForUserMapper { + + int batchInsertList(List<ConfigKeyLimitForUser> list); + + int updateByPrimaryKey(ConfigKeyLimitForUser configKeyLimitForUser); + + int batchInsertOrUpdateList(List<ConfigKeyLimitForUser> list); + + List<ConfigKeyLimitVo> selectByLabelAndKeyIds( + @Param("label") String label, @Param("keyIdList") List<Long> keyIdList); + + ConfigKeyLimitVo selectByLabelAndKeyId(@Param("label") String label, @Param("keyId") Long keyId); +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigMapper.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigMapper.java index 6b6b15a65c..ee5506d9eb 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigMapper.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/ConfigMapper.java @@ -17,10 +17,7 @@ package org.apache.linkis.configuration.dao; -import org.apache.linkis.configuration.entity.CategoryLabel; -import org.apache.linkis.configuration.entity.ConfigKey; -import org.apache.linkis.configuration.entity.ConfigKeyValue; -import org.apache.linkis.configuration.entity.ConfigValue; +import org.apache.linkis.configuration.entity.*; import org.apache.ibatis.annotations.Param; @@ -28,19 +25,14 @@ public interface ConfigMapper { - List<ConfigKeyValue> getConfigByEngineUserCreator( - @Param("engineType") String engineType, - @Param("creator") String creator, - @Param("userName") String userName); - List<ConfigKeyValue> getConfigKeyByLabelIds(@Param("ids") List<Integer> ids); List<ConfigKeyValue> getConfigKeyValueByLabelId(@Param("labelId") Integer labelId); - Long selectAppIDByAppName(@Param("name") String appName); - void insertValue(ConfigValue configValue); + int batchInsertOrUpdateValueList(List<ConfigValue> list); + ConfigValue getConfigValueById(@Param("id") Long id); ConfigValue getConfigValueByKeyAndLabel(ConfigValue configValue); @@ -57,9 +49,14 @@ List<ConfigKeyValue> getConfigByEngineUserCreator( List<ConfigKey> selectKeyByKeyName(@Param("keyName") String keyName); - List<ConfigKey> listKeyByStringValue(@Param("stringValue") String stringValue); + List<ConfigKey> selectKeyByEngineType(@Param("engineType") String engineType); + + List<ConfigKey> selectKeyByEngineTypeAndKeyList( + @Param("engineType") String engineType, @Param("keyList") List<String> keyList); - void insertCreator(String creator); + List<ConfigKey> selectKeyByKeyIdList(@Param("keyIdList") List<Long> keyList); + + List<ConfigKey> listKeyByStringValue(@Param("stringValue") String stringValue); List<CategoryLabel> getCategory(); @@ -74,4 +71,21 @@ List<ConfigKeyValue> getConfigByEngineUserCreator( void insertKey(ConfigKey key); List<ConfigKeyValue> getConfigEnKeyValueByLabelId(@Param("labelId") Integer labelId); + + void deleteConfigKey(@Param("id") Integer id); + + List<ConfigKeyValue> getConfigBykey(@Param("engineType") String engineType, @Param("key") String key); + + List<ConfigKeyValue> getConfigEnBykey( + @Param("engineType") String engineType, @Param("key") String key); + + List<ConfigUserValue> getUserConfigValue( + @Param("key") String key, + @Param("user") String user, + @Param("creator") String creator, + @Param("engineType") String engineType); + + void insertKeyByBase(ConfigKey configKey); + + void updateConfigKey(ConfigKey configKey); } diff --git 
a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/LabelMapper.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/LabelMapper.java index d199134b4b..1a513e3352 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/LabelMapper.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/LabelMapper.java @@ -28,8 +28,13 @@ public interface LabelMapper { ConfigLabel getLabelByKeyValue( @Param("labelKey") String labelKey, @Param("stringValue") String stringValue); + // label key: combined_userCreator_engineType + List<ConfigLabel> selectUserCreatorEngineTypeLabelList(@Param("itemList") List<String> itemList); + void insertLabel(ConfigLabel label); + void batchInsertLabel(@Param("labelList") List<ConfigLabel> labelList); + void deleteLabel(@Param("ids") List<Integer> ids); ConfigLabel getLabelById(@Param("id") Integer id); diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/TemplateConfigKeyMapper.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/TemplateConfigKeyMapper.java new file mode 100644 index 0000000000..6862650f27 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/dao/TemplateConfigKeyMapper.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
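The new batch methods on LabelMapper support resolving many combined_userCreator_engineType labels in one round trip: look up what already exists, then backfill the rest. A sketch of that pattern, assuming ConfigLabel exposes labelKey/stringValue accessors; the factory helper below is hypothetical:

import java.util.List;
import java.util.stream.Collectors;

import org.apache.linkis.configuration.dao.LabelMapper;
import org.apache.linkis.configuration.entity.ConfigLabel;

public class LabelBatchSketch {

  // Look up existing combined labels, then batch-insert whatever is missing.
  public static List<ConfigLabel> getOrCreate(LabelMapper labelMapper, List<String> stringValues) {
    List<ConfigLabel> existing = labelMapper.selectUserCreatorEngineTypeLabelList(stringValues);
    List<String> found =
        existing.stream().map(ConfigLabel::getStringValue).collect(Collectors.toList());

    List<ConfigLabel> missing =
        stringValues.stream()
            .filter(v -> !found.contains(v))
            .map(LabelBatchSketch::newCombinedLabel)
            .collect(Collectors.toList());
    if (!missing.isEmpty()) {
      labelMapper.batchInsertLabel(missing);
    }
    existing.addAll(missing);
    return existing;
  }

  // Hypothetical factory; field names follow the combined label convention.
  private static ConfigLabel newCombinedLabel(String stringValue) {
    ConfigLabel label = new ConfigLabel();
    label.setLabelKey("combined_userCreator_engineType");
    label.setStringValue(stringValue); // e.g. "hadoop-IDE,spark-2.4.3"
    return label;
  }
}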
+ */ + +package org.apache.linkis.configuration.dao; + +import org.apache.linkis.configuration.entity.TemplateConfigKey; +import org.apache.linkis.configuration.entity.TemplateConfigKeyVO; + +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +/** DAO interface for the linkis_ps_configuration_template_config_key table. */ +public interface TemplateConfigKeyMapper { + + int batchInsertList(List<TemplateConfigKey> list); + + List<TemplateConfigKey> selectListByTemplateUuid(@Param("templateUuid") String templateUuid); + + int deleteByTemplateUuidAndKeyIdList( + @Param("templateUuid") String templateUuid, @Param("keyIdList") List<Long> keyIdList); + + int batchInsertOrUpdateList(List<TemplateConfigKey> list); + + List<TemplateConfigKey> selectListByTemplateUuidList( + @Param("templateUuidList") List<String> templateUuidList); + + List<TemplateConfigKeyVO> selectInfoListByTemplateUuid( + @Param("templateUuid") String templateUuid); + + List<TemplateConfigKeyVO> selectInfoListByTemplateName( + @Param("templateName") String templateName); + + List<String> selectEngineTypeByTemplateUuid(@Param("templateUuid") String templateUuid); +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/AcrossClusterRule.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/AcrossClusterRule.java new file mode 100644 index 0000000000..c24cfd3d44 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/AcrossClusterRule.java @@ -0,0 +1,149 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
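Because linkis_ps_configuration_template_config_key is unique on (template_uuid, key_id), batchInsertOrUpdateList gives template publishing an idempotent path: re-sending a template updates rows in place rather than duplicating them. A minimal sketch with illustrative values:

import java.util.Collections;

import org.apache.linkis.configuration.dao.TemplateConfigKeyMapper;
import org.apache.linkis.configuration.entity.TemplateConfigKey;

public class TemplatePublishSketch {

  // Re-publishing the same templateUuid updates existing (template_uuid, key_id)
  // rows instead of inserting duplicates; the values below are made up.
  public static void publish(TemplateConfigKeyMapper mapper, String templateUuid, Long keyId) {
    TemplateConfigKey row = new TemplateConfigKey();
    row.setTemplateName("spark-default"); // redundant copy of the template name
    row.setTemplateUuid(templateUuid);
    row.setKeyId(keyId);
    row.setConfigValue("4g");
    row.setMaxValue("8g");
    row.setCreateBy("hadoop");
    row.setUpdateBy("hadoop");

    mapper.batchInsertOrUpdateList(Collections.singletonList(row));
  }
}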
+ */ + +package org.apache.linkis.configuration.entity; + +import java.util.Date; + +public class AcrossClusterRule { + + private Long id; + private String clusterName; + private String creator; + private String username; + private Date createTime; + private String createBy; + private Date updateTime; + private String updateBy; + private String rules; + private String isValid; + + public AcrossClusterRule() {} + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getClusterName() { + return clusterName; + } + + public void setClusterName(String clusterName) { + this.clusterName = clusterName; + } + + public String getCreator() { + return creator; + } + + public void setCreator(String creator) { + this.creator = creator; + } + + public String getUsername() { + return username; + } + + public void setUsername(String username) { + this.username = username; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public String getCreateBy() { + return createBy; + } + + public void setCreateBy(String createBy) { + this.createBy = createBy; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + public String getUpdateBy() { + return updateBy; + } + + public void setUpdateBy(String updateBy) { + this.updateBy = updateBy; + } + + public String getRules() { + return rules; + } + + public void setRules(String rules) { + this.rules = rules; + } + + public String getIsValid() { + return isValid; + } + + public void setIsValid(String isValid) { + this.isValid = isValid; + } + + @Override + public String toString() { + return "AcrossClusterRule{" + + "id=" + + id + + ", clusterName='" + + clusterName + + '\'' + + ", creator='" + + creator + + '\'' + + ", username='" + + username + + '\'' + + ", createTime=" + + createTime + + ", createBy='" + + createBy + + '\'' + + ", updateTime=" + + updateTime + + ", updateBy='" + + updateBy + + '\'' + + ", rules='" + + rules + + '\'' + + ", isValid='" + + isValid + + '\'' + + '}'; + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKey.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKey.java index 1e26252a7c..4c471409ab 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKey.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKey.java @@ -17,6 +17,9 @@ package org.apache.linkis.configuration.entity; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; + +@JsonIgnoreProperties(ignoreUnknown = true) public class ConfigKey { private Long id; @@ -43,6 +46,22 @@ public class ConfigKey { private String treeName; + /* + 0 none + 1 with mix + 2 with max + 3 min and max both + */ + private Integer boundaryType; + + private String enName; + + private String enDescription; + + private String enTreeName; + + private Boolean templateRequired; + public String getEngineType() { return engineType; } @@ -138,4 +157,84 @@ public Integer getLevel() { public void setLevel(Integer level) { this.level = level; } + + public Integer getBoundaryType() { + return boundaryType; + } + + public void setBoundaryType(Integer boundaryType) { + this.boundaryType = 
boundaryType; + } + + public String getEnName() { + return enName; + } + + public void setEnName(String enName) { + this.enName = enName; + } + + public String getEnDescription() { + return enDescription; + } + + public void setEnDescription(String enDescription) { + this.enDescription = enDescription; + } + + public String getEnTreeName() { + return enTreeName; + } + + public void setEnTreeName(String enTreeName) { + this.enTreeName = enTreeName; + } + + public Boolean getTemplateRequired() { + return templateRequired; + } + + public void setTemplateRequired(Boolean templateRequired) { + this.templateRequired = templateRequired; + } + + @Override + public String toString() { + return "ConfigKey{" + + "id=" + + id + + ", key='" + + key + + '\'' + + ", description='" + + description + + '\'' + + ", name='" + + name + + '\'' + + ", engineType='" + + engineType + + '\'' + + ", defaultValue='" + + defaultValue + + '\'' + + ", validateType='" + + validateType + + '\'' + + ", validateRange='" + + validateRange + + '\'' + + ", isAdvanced=" + + isAdvanced + + ", isHidden=" + + isHidden + + ", level=" + + level + + ", treeName='" + + treeName + + '\'' + + ", boundaryType=" + + boundaryType + + '}'; + } } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyLimitForUser.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyLimitForUser.java new file mode 100644 index 0000000000..a626f32255 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyLimitForUser.java @@ -0,0 +1,200 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
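The new boundaryType field on ConfigKey encodes which of minValue/maxValue apply when a user-submitted value is validated (0 none / 1 min / 2 max / 3 both). One plausible check written against that encoding; treating the bounds as already-parsed doubles is an assumption for illustration:

import org.apache.linkis.configuration.entity.ConfigKey;

public class BoundaryCheckSketch {

  // Checks a candidate value against the key's boundary settings; min and max
  // are assumed to have been parsed as doubles for this sketch.
  public static boolean withinBoundary(ConfigKey key, double value, Double min, Double max) {
    Integer type = key.getBoundaryType();
    if (type == null || type == 0) {
      return true; // 0: no boundary configured
    }
    boolean checkMin = type == 1 || type == 3; // 1: min only, 3: both
    boolean checkMax = type == 2 || type == 3; // 2: max only, 3: both
    if (checkMin && min != null && value < min) {
      return false;
    }
    if (checkMax && max != null && value > max) {
      return false;
    }
    return true;
  }
}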
+ */ + +package org.apache.linkis.configuration.entity; + +import java.util.Date; + +/** for table linkis_ps_configuration_key_limit_for_user @Description */ +public class ConfigKeyLimitForUser { + + /** Table field: id Field type: bigint(19) */ + private Long id; + + /** Username table field: user_name field type: varchar(50) */ + private String userName; + + /** + * combined label combined_userCreator_engineType such as hadoop-IDE, spark-2.4.3 table field: + * combined_label_value field type: varchar(200) + */ + private String combinedLabelValue; + + /** id of linkis_ps_configuration_config_key table field: key_id field type: bigint(19) */ + private Long keyId; + + /** Configuration value table field: config_value field type: varchar(200) */ + private String configValue; + + /** Upper limit table field: max_value field type: varchar(50) */ + private String maxValue; + + /** Lower limit value (reserved) table field: min_value field type: varchar(50) */ + private String minValue; + + /** + * uuid The template id table field of the third-party record: latest_update_template_uuid Field + * type: varchar(34) + */ + private String latestUpdateTemplateUuid; + + /** Is it valid Reserved Y/N table field: is_valid field type: varchar(2) */ + private String isValid; + + /** Creator table field: create_by field type: varchar(50) */ + private String createBy; + + /** + * create time table field: create_time field type: timestamp(19) default value: CURRENT_TIMESTAMP + */ + private Date createTime; + + /** Updater table field: update_by field type: varchar(50) */ + private String updateBy; + + /** + * update time table field: update_time field type: timestamp(19) default value: CURRENT_TIMESTAMP + */ + private Date updateTime; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getUserName() { + return userName; + } + + public void setUserName(String userName) { + this.userName = userName; + } + + public String getCombinedLabelValue() { + return combinedLabelValue; + } + + public void setCombinedLabelValue(String combinedLabelValue) { + this.combinedLabelValue = combinedLabelValue; + } + + public Long getKeyId() { + return keyId; + } + + public void setKeyId(Long keyId) { + this.keyId = keyId; + } + + public String getConfigValue() { + return configValue; + } + + public void setConfigValue(String configValue) { + this.configValue = configValue; + } + + public String getMaxValue() { + return maxValue; + } + + public void setMaxValue(String maxValue) { + this.maxValue = maxValue; + } + + public String getMinValue() { + return minValue; + } + + public void setMinValue(String minValue) { + this.minValue = minValue; + } + + public String getLatestUpdateTemplateUuid() { + return latestUpdateTemplateUuid; + } + + public void setLatestUpdateTemplateUuid(String latestUpdateTemplateUuid) { + this.latestUpdateTemplateUuid = latestUpdateTemplateUuid; + } + + public String getIsValid() { + return isValid; + } + + public void setIsValid(String isValid) { + this.isValid = isValid; + } + + public String getCreateBy() { + return createBy; + } + + public void setCreateBy(String createBy) { + this.createBy = createBy; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public String getUpdateBy() { + return updateBy; + } + + public void setUpdateBy(String updateBy) { + this.updateBy = updateBy; + } + + public Date getUpdateTime() { + return updateTime; + } + + 
public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()); + sb.append(" ["); + sb.append("Hash = ").append(hashCode()); + sb.append(", id=").append(id); + sb.append(", userName=").append(userName); + sb.append(", combinedLabelValue=").append(combinedLabelValue); + sb.append(", keyId=").append(keyId); + sb.append(", configValue=").append(configValue); + sb.append(", maxValue=").append(maxValue); + sb.append(", minValue=").append(minValue); + sb.append(", latestUpdateTemplateUuid=").append(latestUpdateTemplateUuid); + sb.append(", isValid=").append(isValid); + sb.append(", createBy=").append(createBy); + sb.append(", createTime=").append(createTime); + sb.append(", updateBy=").append(updateBy); + sb.append(", updateTime=").append(updateTime); + sb.append(']'); + return sb.toString(); + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyLimitVo.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyLimitVo.java new file mode 100644 index 0000000000..c612168713 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyLimitVo.java @@ -0,0 +1,64 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.configuration.entity; + +public class ConfigKeyLimitVo { + + /** id : bigint(19) */ + private Long keyId; + + private String key; + + /** config_value varchar(200) */ + private String configValue; + + /** max_value varchar(50) */ + private String maxValue; + + public Long getKeyId() { + return keyId; + } + + public void setKeyId(Long keyId) { + this.keyId = keyId; + } + + public String getConfigValue() { + return configValue; + } + + public void setConfigValue(String configValue) { + this.configValue = configValue; + } + + public String getMaxValue() { + return maxValue; + } + + public void setMaxValue(String maxValue) { + this.maxValue = maxValue; + } + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyValue.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyValue.java index 143566218c..19266bc691 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyValue.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigKeyValue.java @@ -17,6 +17,8 @@ package org.apache.linkis.configuration.entity; +import java.util.Map; + public class ConfigKeyValue { private Long id; @@ -53,6 +55,16 @@ public class ConfigKeyValue { private Boolean isUserDefined; + private Map<String, Object> specialLimit; + + public Map<String, Object> getSpecialLimit() { + return specialLimit; + } + + public void setSpecialLimit(Map<String, Object> specialLimit) { + this.specialLimit = specialLimit; + } + public Boolean getIsUserDefined() { return isUserDefined; } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigUserValue.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigUserValue.java new file mode 100644 index 0000000000..273828ff02 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/ConfigUserValue.java @@ -0,0 +1,154 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
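specialLimit on ConfigKeyValue carries the per-user limit resolved from ConfigKeyLimitVo alongside the value itself, so a caller can render the effective cap next to the configured value. A sketch of how a service might populate it; the map keys used here are assumptions, not fixed by this patch:

import java.util.HashMap;
import java.util.Map;

import org.apache.linkis.configuration.entity.ConfigKeyLimitVo;
import org.apache.linkis.configuration.entity.ConfigKeyValue;

public class SpecialLimitSketch {

  // Copies the user-specific limit onto the returned key/value pair.
  // The "maxValue"/"configValue" map keys are illustrative only.
  public static void attachLimit(ConfigKeyValue keyValue, ConfigKeyLimitVo limit) {
    if (limit == null) {
      return;
    }
    Map<String, Object> specialLimit = new HashMap<>();
    specialLimit.put("maxValue", limit.getMaxValue());
    specialLimit.put("configValue", limit.getConfigValue());
    keyValue.setSpecialLimit(specialLimit);
  }
}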
+ */ + +package org.apache.linkis.configuration.entity; + +public class ConfigUserValue { + + private String key; + + private String name; + // linkis_ps_configuration_config_key id + private Integer configKeyId; + + private String description; + + private String defaultValue; + + private String engineType; + // linkis_ps_configuration_config_value id + private Integer configValueId; + + private String configValue; + // linkis_cg_manager_label id + private Integer configLabelId; + + private String labelValue; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } + + public Integer getConfigKeyId() { + return configKeyId; + } + + public void setConfigKeyId(Integer configKeyId) { + this.configKeyId = configKeyId; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getDefaultValue() { + return defaultValue; + } + + public void setDefaultValue(String defaultValue) { + this.defaultValue = defaultValue; + } + + public String getEngineType() { + return engineType; + } + + public void setEngineType(String engineType) { + this.engineType = engineType; + } + + public Integer getConfigValueId() { + return configValueId; + } + + public void setConfigValueId(Integer configValueId) { + this.configValueId = configValueId; + } + + public String getConfigValue() { + return configValue; + } + + public void setConfigValue(String configValue) { + this.configValue = configValue; + } + + public Integer getConfigLabelId() { + return configLabelId; + } + + public void setConfigLabelId(Integer configLabelId) { + this.configLabelId = configLabelId; + } + + public String getLabelValue() { + return labelValue; + } + + public void setLabelValue(String labelValue) { + this.labelValue = labelValue; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + @Override + public String toString() { + return "ConfigUserValue{" + + "key='" + + key + + '\'' + + ", name='" + + name + + '\'' + + ", configKeyId=" + + configKeyId + + ", description='" + + description + + '\'' + + ", defaultValue='" + + defaultValue + + '\'' + + ", engineType='" + + engineType + + '\'' + + ", configValueId=" + + configValueId + + ", configValue='" + + configValue + + '\'' + + ", configLabelId=" + + configLabelId + + ", labelValue='" + + labelValue + + '\'' + + '}'; + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/TemplateConfigKey.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/TemplateConfigKey.java new file mode 100644 index 0000000000..b29b3742f2 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/TemplateConfigKey.java @@ -0,0 +1,200 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.configuration.entity; + +import java.util.Date; + +/** The entity class of the linkis_ps_configuration_template_config_key table @Description */ +public class TemplateConfigKey { + + /** Table field: id Field type: bigint(19) */ + private Long id; + + /** + * Configuration template name redundant storage table field: template_name field type: + * varchar(200) + */ + private String templateName; + + /** + * uuid The template id table field of the third-party record: template_uuid Field type: + * varchar(34) + */ + private String templateUuid; + + /** id of linkis_ps_configuration_config_key table field: key_id field type: bigint(19) */ + private Long keyId; + + /** Configuration value table field: config_value field type: varchar(200) */ + private String configValue; + + /** Upper limit table field: max_value field type: varchar(50) */ + private String maxValue; + + /** Lower limit value (reserved) table field: min_value field type: varchar(50) */ + private String minValue; + + /** Validation regularity (reserved) table field: validate_range field type: varchar(50) */ + private String validateRange; + + /** Is it valid Reserved Y/N table field: is_valid field type: varchar(2) */ + private String isValid; + + /** Creator table field: create_by field type: varchar(50) */ + private String createBy; + + /** + * create time table field: create_time field type: timestamp(19) default value: CURRENT_TIMESTAMP + */ + private Date createTime; + + /** Updater table field: update_by field type: varchar(50) */ + private String updateBy; + + /** + * update time table field: update_time field type: timestamp(19) default value: CURRENT_TIMESTAMP + */ + private Date updateTime; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getTemplateName() { + return templateName; + } + + public void setTemplateName(String templateName) { + this.templateName = templateName; + } + + public String getTemplateUuid() { + return templateUuid; + } + + public void setTemplateUuid(String templateUuid) { + this.templateUuid = templateUuid; + } + + public Long getKeyId() { + return keyId; + } + + public void setKeyId(Long keyId) { + this.keyId = keyId; + } + + public String getConfigValue() { + return configValue; + } + + public void setConfigValue(String configValue) { + this.configValue = configValue; + } + + public String getMaxValue() { + return maxValue; + } + + public void setMaxValue(String maxValue) { + this.maxValue = maxValue; + } + + public String getMinValue() { + return minValue; + } + + public void setMinValue(String minValue) { + this.minValue = minValue; + } + + public String getValidateRange() { + return validateRange; + } + + public void setValidateRange(String validateRange) { + this.validateRange = validateRange; + } + + public String getIsValid() { + return isValid; + } + + public void setIsValid(String isValid) { + this.isValid = isValid; + } + + public String getCreateBy() { + return createBy; + } + + public void setCreateBy(String createBy) { + this.createBy = createBy; + } + + public Date 
getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public String getUpdateBy() { + return updateBy; + } + + public void setUpdateBy(String updateBy) { + this.updateBy = updateBy; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(getClass().getSimpleName()); + sb.append(" ["); + sb.append("Hash = ").append(hashCode()); + sb.append(", id=").append(id); + sb.append(", templateName=").append(templateName); + sb.append(", templateUuid=").append(templateUuid); + sb.append(", keyId=").append(keyId); + sb.append(", configValue=").append(configValue); + sb.append(", maxValue=").append(maxValue); + sb.append(", minValue=").append(minValue); + sb.append(", validateRange=").append(validateRange); + sb.append(", isValid=").append(isValid); + sb.append(", createBy=").append(createBy); + sb.append(", createTime=").append(createTime); + sb.append(", updateBy=").append(updateBy); + sb.append(", updateTime=").append(updateTime); + sb.append(']'); + return sb.toString(); + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/TemplateConfigKeyVO.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/TemplateConfigKeyVO.java new file mode 100644 index 0000000000..796a90fa63 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/TemplateConfigKeyVO.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.configuration.entity; + +public class TemplateConfigKeyVO extends TemplateConfigKey { + + private String key; + + public String getKey() { + return key; + } + + public void setKey(String key) { + this.key = key; + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/UserIpVo.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/UserIpVo.java index 05ec8046fd..77d2c67576 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/UserIpVo.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/entity/UserIpVo.java @@ -19,10 +19,12 @@ import java.util.Date; +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; @ApiModel +@JsonIgnoreProperties(ignoreUnknown = true) public class UserIpVo { @ApiModelProperty("id") diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/enumeration/BoundaryTypeEnum.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/enumeration/BoundaryTypeEnum.java new file mode 100644 index 0000000000..79bff7cae9 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/enumeration/BoundaryTypeEnum.java @@ -0,0 +1,41 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
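
UserIpVo above now carries @JsonIgnoreProperties(ignoreUnknown = true), so Jackson drops unknown JSON fields instead of failing deserialization when the payload gains new fields. A small self-contained sketch of that behavior (the Vo class and JSON are hypothetical stand-ins, not Linkis code):

```java
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.databind.ObjectMapper;

public class IgnoreUnknownDemo {
  // Stand-in for UserIpVo: only "user" is mapped, any other field is ignored.
  @JsonIgnoreProperties(ignoreUnknown = true)
  static class Vo {
    public String user;
  }

  public static void main(String[] args) throws Exception {
    // "newField" does not exist on Vo; without ignoreUnknown = true this
    // readValue call would throw UnrecognizedPropertyException.
    Vo vo = new ObjectMapper()
        .readValue("{\"user\":\"hadoop\",\"newField\":1}", Vo.class);
    System.out.println(vo.user); // prints: hadoop
  }
}
```
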
+ */ + +package org.apache.linkis.configuration.enumeration; + +public enum BoundaryTypeEnum { + /* + 0 none + 1 with min (lower bound only) + 2 with max (upper bound only) + 3 with both min and max + */ + NONE(0), + WITH_MIX(1), + WITH_MAX(2), + WITH_BOTH(3); + + private Integer id; + + BoundaryTypeEnum(Integer id) { + this.id = id; + } + + public Integer getId() { + return this.id; + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/errorcode/LinkisConfigurationErrorCodeSummary.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/errorcode/LinkisConfigurationErrorCodeSummary.java index f02e0398f5..9f4a369d8d 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/errorcode/LinkisConfigurationErrorCodeSummary.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/errorcode/LinkisConfigurationErrorCodeSummary.java @@ -30,7 +30,7 @@ public enum LinkisConfigurationErrorCodeSummary implements LinkisErrorCode { CONFIGKEY_CANNOT_BE_NULL(14100, "ConfigKey cannot be null(configKey 不能为空)"), CONFIG_KEY_NOT_EXISTS(14100, "Config key not exists:{0}(配置键不存在:{0})"), LABEL_NOT_EXISTS(14100, "Label not exists:{0}(标签不存在{0})"), - KEY_OR_VALUE_CANNOT(14100, "Key or value cannot be null(键或值不能为空)"), + KEY_CANNOT_EMPTY(14100, "Key cannot be empty(Key 不能为空)"), PARAMS_CANNOT_BE_EMPTY(14100, "Params cannot be empty!(参数不能为空!)"), TOKEN_IS_ERROR(14100, "Token is error(令牌是错误的)"), IS_NULL_CANNOT_BE_ADDED(14100, "CategoryName is null, cannot be added(categoryName 为空,无法添加)"), @@ -39,9 +39,9 @@ public enum LinkisConfigurationErrorCodeSummary implements LinkisErrorCode { ENGINE_TYPE_IS_NULL(14100, "Engine type is null, cannot be added(引擎类型为空,无法添加)"), INCORRECT_FIXED_SUCH( 14100, - "The saved engine type parameter is incorrect, please send it in a fixed format, such as spark-3.2.1(保存的引擎类型参数有误,请按照固定格式传送,例如spark-3.2.1)"), + "The saved engine type parameter is incorrect, please send it in a fixed format, such as spark-2.4.3(保存的引擎类型参数有误,请按照固定格式传送,例如spark-2.4.3)"), INCOMPLETE_RECONFIRM(14100, "Incomplete request parameters, please reconfirm(请求参数不完整,请重新确认)"), - ONLY_ADMIN_CAN_MODIFY(14100, "Only admin can modify category(只有管理员才能修改目录)"), + ONLY_ADMIN_PERFORM(14100, "Only admins have permission to perform this operation(限管理员执行此操作)"), THE_LABEL_PARAMETER_IS_EMPTY(14100, " The label parameter is empty(标签参数为空)"), ERROR_VALIDATOR_RANGE(14100, "Error validator range!(错误验证器范围!)"), TYPE_OF_LABEL_NOT_SUPPORTED(14100, "This type of label is not supported:{0}(不支持这种类型的标签:{0})"); diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/AcrossClusterRuleRestfulApi.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/AcrossClusterRuleRestfulApi.java new file mode 100644 index 0000000000..3a01c86060 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/AcrossClusterRuleRestfulApi.java @@ -0,0 +1,332 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License.
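
BoundaryTypeEnum above types the integer boundary_type column (0 = no bound, 1 = lower bound only, 2 = upper bound only, 3 = both). A hedged sketch of the reverse lookup a caller typically needs; fromId is a hypothetical helper, not part of this diff:

```java
import java.util.Arrays;

// Hypothetical helper in the same package; the diff itself only defines
// the constants and getId().
public final class BoundaryTypeEnumUtil {
  private BoundaryTypeEnumUtil() {}

  /** Resolve a boundary_type column value to its enum constant, falling back to NONE. */
  public static BoundaryTypeEnum fromId(Integer id) {
    return Arrays.stream(BoundaryTypeEnum.values())
        .filter(t -> t.getId().equals(id)) // equals(null) is false, so null maps to NONE
        .findFirst()
        .orElse(BoundaryTypeEnum.NONE);
  }
}
```
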
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.configuration.restful.api; + +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.configuration.entity.AcrossClusterRule; +import org.apache.linkis.configuration.service.AcrossClusterRuleService; +import org.apache.linkis.configuration.util.CommonUtils; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.utils.ModuleUserUtils; + +import org.apache.commons.lang3.StringUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.*; + +import javax.servlet.http.HttpServletRequest; + +import java.util.Map; + +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Api(tags = "across cluster rule api") +@RestController +@RequestMapping(path = "/configuration/acrossClusterRule") +public class AcrossClusterRuleRestfulApi { + + @Autowired private AcrossClusterRuleService acrossClusterRuleService; + + private Logger log = LoggerFactory.getLogger(this.getClass()); + + @ApiOperation( + value = "valid acrossClusterRule", + notes = "valid acrossClusterRule", + response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam(name = "req", dataType = "HttpServletRequest", value = "req"), + @ApiImplicitParam(name = "id", dataType = "Integer", value = "id"), + @ApiImplicitParam(name = "isValid", dataType = "String", value = "isValid"), + }) + @RequestMapping(path = "/isValid", method = RequestMethod.PUT) + public Message isValidRule(HttpServletRequest req, @RequestBody Map json) { + String operationUser = ModuleUserUtils.getOperationUser(req, "execute valid acrossClusterRule"); + if (!Configuration.isAdmin(operationUser)) { + return Message.error( + "Failed to valid acrossClusterRule List,msg: only administrators can configure"); + } + + Integer idInt = (Integer) json.get("id"); + Long id = idInt.longValue(); + String isValid = (String) json.get("isValid"); + + if (StringUtils.isBlank(isValid)) { + return Message.error("Failed to valid acrossClusterRule: Illegal Input Param"); + } + + try { + acrossClusterRuleService.validAcrossClusterRule(id, isValid); + } catch (Exception e) { + log.info("valid acrossClusterRule failed:" + e.getMessage()); + return Message.error("valid acrossClusterRule failed"); + } + + return Message.ok(); + } + + @ApiOperation( + value = "query acrossClusterRule list", + notes = "query acrossClusterRule list", + response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam(name = "req", dataType = "HttpServletRequest", value = "req"), + @ApiImplicitParam(name = "creator", dataType = "String", value = "creator"), + @ApiImplicitParam(name = "username", dataType = "String", value = "username"), + @ApiImplicitParam(name = "clusterName", dataType = "String", value = "clusterName"), + }) + @RequestMapping(path = "/list", method = RequestMethod.GET) + public Message queryAcrossClusterRuleList( + HttpServletRequest req, + @RequestParam(value = 
"creator", required = false) String creator, + @RequestParam(value = "username", required = false) String username, + @RequestParam(value = "clusterName", required = false) String clusterName, + @RequestParam(value = "pageNow", required = false) Integer pageNow, + @RequestParam(value = "pageSize", required = false) Integer pageSize) { + String operationUser = + ModuleUserUtils.getOperationUser(req, "execute query acrossClusterRule List"); + if (!Configuration.isAdmin(operationUser)) { + return Message.error( + "Failed to query acrossClusterRule List,msg: only administrators can configure"); + } + + if (StringUtils.isBlank(username)) username = null; + if (StringUtils.isBlank(creator)) creator = null; + if (StringUtils.isBlank(clusterName)) clusterName = null; + if (null == pageNow) pageNow = 1; + if (null == pageSize) pageSize = 20; + + Map resultMap = null; + try { + resultMap = + acrossClusterRuleService.queryAcrossClusterRuleList( + creator, username, clusterName, pageNow, pageSize); + } catch (Exception e) { + log.info("query acrossClusterRule List failed:" + e.getMessage()); + return Message.error("query acrossClusterRule List failed"); + } + + Message msg = Message.ok(); + msg.getData().putAll(resultMap); + return msg; + } + + @ApiOperation( + value = "delete acrossClusterRule", + notes = "delete acrossClusterRule", + response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam(name = "req", dataType = "HttpServletRequest", value = "req"), + @ApiImplicitParam(name = "creator", dataType = "String", value = "creator"), + @ApiImplicitParam(name = "username", dataType = "String", value = "username"), + }) + @RequestMapping(path = "/delete", method = RequestMethod.DELETE) + public Message deleteAcrossClusterRule( + HttpServletRequest req, + @RequestParam(value = "creator", required = false) String creator, + @RequestParam(value = "username", required = false) String username) { + String operationUser = + ModuleUserUtils.getOperationUser(req, "execute delete acrossClusterRule"); + if (!Configuration.isAdmin(operationUser)) { + return Message.error( + "Failed to delete acrossClusterRule,msg: only administrators can configure"); + } + + if (StringUtils.isBlank(creator) || StringUtils.isBlank(username)) { + return Message.error("Failed to delete acrossClusterRule: Illegal Input Param"); + } + + try { + acrossClusterRuleService.deleteAcrossClusterRule(creator, username); + } catch (Exception e) { + log.info("delete acrossClusterRule failed:" + e.getMessage()); + return Message.error("delete acrossClusterRule failed"); + } + + return Message.ok(); + } + + @ApiOperation( + value = "update acrossClusterRule", + notes = "update acrossClusterRule ", + response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam(name = "req", dataType = "HttpServletRequest", value = "req"), + @ApiImplicitParam(name = "id", dataType = "Integer", value = "id"), + @ApiImplicitParam(name = "clusterName", dataType = "String", value = "clusterName"), + @ApiImplicitParam(name = "creator", dataType = "String", value = "creator"), + @ApiImplicitParam(name = "username", dataType = "String", value = "username"), + @ApiImplicitParam(name = "isValid", dataType = "String", value = "isValid"), + @ApiImplicitParam(name = "startTime", dataType = "String", value = "startTime"), + @ApiImplicitParam(name = "endTime", dataType = "String", value = "endTime"), + @ApiImplicitParam(name = "CPUThreshold", dataType = "String", value = "CPUThreshold"), + @ApiImplicitParam(name = "MemoryThreshold", dataType = "String", value = 
"MemoryThreshold"), + @ApiImplicitParam( + name = "CPUPercentageThreshold", + dataType = "String", + value = "CPUPercentageThreshold"), + @ApiImplicitParam( + name = "MemoryPercentageThreshold", + dataType = "String", + value = "MemoryPercentageThreshold"), + }) + @RequestMapping(path = "/update", method = RequestMethod.PUT) + public Message updateAcrossClusterRule( + HttpServletRequest req, @RequestBody Map json) { + String operationUser = + ModuleUserUtils.getOperationUser(req, "execute update acrossClusterRule"); + if (!Configuration.isAdmin(operationUser)) { + return Message.error( + "Failed to update acrossClusterRule,msg: only administrators can configure"); + } + + Integer idInt = (Integer) json.get("id"); + Long id = idInt.longValue(); + String clusterName = (String) json.get("clusterName"); + String creator = (String) json.get("creator"); + String username = (String) json.get("username"); + String isValid = (String) json.get("isValid"); + String startTime = (String) json.get("startTime"); + String endTime = (String) json.get("endTime"); + String CPUThreshold = (String) json.get("CPUThreshold"); + String MemoryThreshold = (String) json.get("MemoryThreshold"); + String CPUPercentageThreshold = (String) json.get("CPUPercentageThreshold"); + String MemoryPercentageThreshold = (String) json.get("MemoryPercentageThreshold"); + if (StringUtils.isBlank(clusterName) + || StringUtils.isBlank(creator) + || StringUtils.isBlank(username) + || StringUtils.isBlank(isValid) + || StringUtils.isBlank(startTime) + || StringUtils.isBlank(endTime) + || StringUtils.isBlank(CPUThreshold) + || StringUtils.isBlank(MemoryThreshold) + || StringUtils.isBlank(CPUPercentageThreshold) + || StringUtils.isBlank(MemoryPercentageThreshold)) { + return Message.error("Failed to add acrossClusterRule: Illegal Input Param"); + } + + try { + String rules = + CommonUtils.ruleMap2String( + startTime, + endTime, + CPUThreshold, + MemoryThreshold, + CPUPercentageThreshold, + MemoryPercentageThreshold); + AcrossClusterRule acrossClusterRule = new AcrossClusterRule(); + acrossClusterRule.setId(id); + acrossClusterRule.setClusterName(clusterName.toLowerCase()); + acrossClusterRule.setCreator(creator); + acrossClusterRule.setUsername(username); + acrossClusterRule.setUpdateBy(operationUser); + acrossClusterRule.setRules(rules); + acrossClusterRule.setIsValid(isValid); + acrossClusterRuleService.updateAcrossClusterRule(acrossClusterRule); + } catch (Exception e) { + log.info("update acrossClusterRule failed:" + e.getMessage()); + return Message.error("update acrossClusterRule failed:history already exist"); + } + return Message.ok(); + } + + @ApiOperation( + value = "add acrossClusterRule", + notes = "add acrossClusterRule ", + response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam(name = "req", dataType = "HttpServletRequest", value = "req"), + @ApiImplicitParam(name = "clusterName", dataType = "String", value = "clusterName"), + @ApiImplicitParam(name = "creator", dataType = "String", value = "creator"), + @ApiImplicitParam(name = "username", dataType = "String", value = "username"), + @ApiImplicitParam(name = "isValid", dataType = "String", value = "isValid"), + @ApiImplicitParam(name = "startTime", dataType = "String", value = "startTime"), + @ApiImplicitParam(name = "endTime", dataType = "String", value = "endTime"), + @ApiImplicitParam(name = "CPUThreshold", dataType = "String", value = "CPUThreshold"), + @ApiImplicitParam(name = "MemoryThreshold", dataType = "String", value = "MemoryThreshold"), + 
@ApiImplicitParam( + name = "CPUPercentageThreshold", + dataType = "String", + value = "CPUPercentageThreshold"), + @ApiImplicitParam( + name = "MemoryPercentageThreshold", + dataType = "String", + value = "MemoryPercentageThreshold"), + }) + @RequestMapping(path = "/add", method = RequestMethod.POST) + public Message insertAcrossClusterRule( + HttpServletRequest req, @RequestBody Map json) { + String operationUser = ModuleUserUtils.getOperationUser(req, "execute add acrossClusterRule"); + if (!Configuration.isAdmin(operationUser)) { + return Message.error( + "Failed to add acrossClusterRule,msg: only administrators can configure"); + } + + String clusterName = (String) json.get("clusterName"); + String creator = (String) json.get("creator"); + String username = (String) json.get("username"); + String isValid = (String) json.get("isValid"); + String startTime = (String) json.get("startTime"); + String endTime = (String) json.get("endTime"); + String CPUThreshold = (String) json.get("CPUThreshold"); + String MemoryThreshold = (String) json.get("MemoryThreshold"); + String CPUPercentageThreshold = (String) json.get("CPUPercentageThreshold"); + String MemoryPercentageThreshold = (String) json.get("MemoryPercentageThreshold"); + if (StringUtils.isBlank(clusterName) + || StringUtils.isBlank(creator) + || StringUtils.isBlank(username) + || StringUtils.isBlank(isValid) + || StringUtils.isBlank(startTime) + || StringUtils.isBlank(endTime) + || StringUtils.isBlank(CPUThreshold) + || StringUtils.isBlank(MemoryThreshold) + || StringUtils.isBlank(CPUPercentageThreshold) + || StringUtils.isBlank(MemoryPercentageThreshold)) { + return Message.error("Failed to add acrossClusterRule: Illegal Input Param"); + } + + try { + String rules = + CommonUtils.ruleMap2String( + startTime, + endTime, + CPUThreshold, + MemoryThreshold, + CPUPercentageThreshold, + MemoryPercentageThreshold); + AcrossClusterRule acrossClusterRule = new AcrossClusterRule(); + acrossClusterRule.setClusterName(clusterName.toLowerCase()); + acrossClusterRule.setCreator(creator); + acrossClusterRule.setUsername(username); + acrossClusterRule.setCreateBy(operationUser); + acrossClusterRule.setUpdateBy(operationUser); + acrossClusterRule.setRules(rules); + acrossClusterRule.setIsValid(isValid); + acrossClusterRuleService.insertAcrossClusterRule(acrossClusterRule); + } catch (Exception e) { + log.info("add acrossClusterRule failed:" + e.getMessage()); + return Message.error("add acrossClusterRule failed:history already exist"); + } + + return Message.ok(); + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApi.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApi.java index 283960d5df..11dfee8de1 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApi.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApi.java @@ -26,6 +26,7 @@ import org.apache.linkis.configuration.util.ConfigurationConfiguration; import org.apache.linkis.configuration.util.JsonNodeUtil; import org.apache.linkis.configuration.util.LabelEntityParser; +import org.apache.linkis.configuration.validate.ValidatorManager; import org.apache.linkis.manager.label.entity.engine.EngineTypeLabel; import 
org.apache.linkis.manager.label.entity.engine.UserCreatorLabel; import org.apache.linkis.manager.label.utils.LabelUtils; @@ -33,6 +34,7 @@ import org.apache.linkis.server.Message; import org.apache.linkis.server.utils.ModuleUserUtils; +import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.springframework.beans.factory.annotation.Autowired; @@ -42,12 +44,13 @@ import java.io.IOException; import java.text.MessageFormat; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; +import java.util.*; +import java.util.stream.Collectors; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; +import com.github.pagehelper.PageHelper; +import com.github.pagehelper.PageInfo; import com.github.xiaoymin.knife4j.annotations.ApiOperationSupport; import io.swagger.annotations.Api; import io.swagger.annotations.ApiImplicitParam; @@ -71,6 +74,8 @@ public class ConfigurationRestfulApi { @Autowired private ConfigKeyService configKeyService; + @Autowired private ValidatorManager validatorManager; + ObjectMapper mapper = new ObjectMapper(); private static final String NULL = "null"; @@ -144,6 +149,7 @@ public Message getFullTreesByAppName( ArrayList configTrees = configurationService.getFullTreeByLabelList( labelList, true, req.getHeader("Content-Language")); + return Message.ok().data("fullTree", configTrees); } @@ -152,9 +158,42 @@ public Message getFullTreesByAppName( public Message getCategory(HttpServletRequest req) { List categoryLabelList = categoryService.getAllCategory(req.getHeader("Content-Language")); + return Message.ok().data("Category", categoryLabelList); } + @ApiOperation( + value = "getItemList", + notes = "get configuration list by engineType", + response = Message.class) + @RequestMapping(path = "/getItemList", method = RequestMethod.GET) + public Message getItemList( + HttpServletRequest req, @RequestParam(value = "engineType") String engineType) + throws ConfigurationException { + ModuleUserUtils.getOperationUser(req, "getItemList with engineType:" + engineType); + // Passing * returns the configuration information of all engine types + if ("*".equals(engineType)) { + engineType = null; + } + List<ConfigKey> result = configKeyService.getConfigKeyList(engineType); + List<Map<String, Object>> filterResult = new ArrayList<>(); + for (ConfigKey configKey : result) { + Map<String, Object> temp = new HashMap<>(); + temp.put("key", configKey.getKey()); + temp.put("name", configKey.getName()); + temp.put("description", configKey.getDescription()); + temp.put("engineType", configKey.getEngineType()); + temp.put("validateType", configKey.getValidateType()); + temp.put("validateRange", configKey.getValidateRange()); + temp.put("boundaryType", configKey.getBoundaryType()); + temp.put("defaultValue", configKey.getDefaultValue()); + temp.put("require", configKey.getTemplateRequired()); + filterResult.add(temp); + } + + return Message.ok().data("itemList", filterResult); + } + @ApiOperation( value = "createFirstCategory", notes = "create first category", @@ -254,10 +293,23 @@ public Message saveFullTree(HttpServletRequest req, @RequestBody JsonNode json) String username = ModuleUserUtils.getOperationUser(req, "saveFullTree"); ArrayList createList = new ArrayList<>(); ArrayList updateList = new ArrayList<>(); + ArrayList<List<ConfigKeyValue>> checkList = new ArrayList<>(); + String sparkConf = ""; for (Object o : fullTrees) { String s = BDPJettyServerHelper.gson().toJson(o); ConfigTree fullTree = BDPJettyServerHelper.gson().fromJson(s, ConfigTree.class); List<ConfigKeyValue> settings = fullTree.getSettings(); + checkList.add(settings); + for (ConfigKeyValue configKeyValue : settings) { + if (configKeyValue.getKey().equals("spark.conf") + && StringUtils.isNotBlank(configKeyValue.getConfigValue())) { + sparkConf = configKeyValue.getConfigValue().trim(); + configKeyValue.setConfigValue(sparkConf); + } + } + } + for (List<ConfigKeyValue> settings : checkList) { + sparkConfCheck(settings, sparkConf); Integer userLabelId = configurationService.checkAndCreateUserLabel(settings, username, creator); for (ConfigKeyValue setting : settings) { @@ -304,12 +356,39 @@ public Message saveFullTree(HttpServletRequest req, @RequestBody JsonNode json) engineVersion); } }); + configurationService.clearAMCacheConf(username, creator, null, null); } else { configurationService.clearAMCacheConf(username, creator, engine, version); } return Message.ok(); } + private void sparkConfCheck(List<ConfigKeyValue> settings, String sparkConf) + throws ConfigurationException { + if (StringUtils.isNotBlank(sparkConf)) { + // Check whether spark.conf itself contains duplicate keys, + // e.g. spark.conf : spark.shuffle.compress=true;spark.executor.memory=4g + String[] split = sparkConf.split(";"); + int setSize = + Arrays.stream(split).map(s -> s.split("=")[0].trim()).collect(Collectors.toSet()).size(); + int listSize = + Arrays.stream(split).map(s -> s.split("=")[0].trim()).collect(Collectors.toList()).size(); + if (listSize != setSize) { + throw new ConfigurationException("Spark.conf contains duplicate keys"); + } + // Check whether a key in spark.conf duplicates one of the individually configured keys + for (String keyValue : split) { + String key = keyValue.split("=")[0].trim(); + boolean matchResult = + settings.stream().anyMatch(settingKey -> key.equals(settingKey.getKey())); + if (matchResult) { + throw new ConfigurationException( + "Saved key is duplicated with the spark.conf key, key: " + key); + } + } + } + } + @ApiOperation( value = "listAllEngineType", notes = "list all engine type", @@ -376,7 +455,7 @@ public Message rpcTest( private void checkAdmin(String userName) throws ConfigurationException { if (!org.apache.linkis.common.conf.Configuration.isAdmin(userName)) { - throw new ConfigurationException(ONLY_ADMIN_CAN_MODIFY.getErrorDesc()); + throw new ConfigurationException(ONLY_ADMIN_PERFORM.getErrorDesc()); } } @@ -395,7 +474,7 @@ public Message getKeyValue( @RequestParam(value = "creator", required = false, defaultValue = "*") String creator, @RequestParam(value = "configKey") String configKey) throws ConfigurationException { - String username = ModuleUserUtils.getOperationUser(req, "saveKey"); + String username = ModuleUserUtils.getOperationUser(req, "getKeyValue"); if (engineType.equals("*") && !version.equals("*")) { return Message.error("When engineType is any engine, the version must also be any version"); } @@ -424,17 +503,26 @@ public Message getKeyValue( @RequestMapping(path = "/keyvalue", method = RequestMethod.POST) public Message saveKeyValue(HttpServletRequest req, @RequestBody Map json) throws ConfigurationException { + Message message = Message.ok(); String username = ModuleUserUtils.getOperationUser(req, "saveKey"); String engineType = (String) json.getOrDefault("engineType", "*"); + String user = (String) json.getOrDefault("user", ""); String version = (String) json.getOrDefault("version", "*"); String creator = (String) json.getOrDefault("creator", "*"); String configKey = (String) json.get("configKey"); String value = (String) json.get("configValue"); + boolean force =
Boolean.parseBoolean(json.getOrDefault("force", "false").toString()); + if (!org.apache.linkis.common.conf.Configuration.isAdmin(username) && !username.equals(user)) { + return Message.error("Only admin can modify other user configuration data"); + } if (engineType.equals("*") && !version.equals("*")) { return Message.error("When engineType is any engine, the version must also be any version"); } - if (StringUtils.isBlank(configKey) || StringUtils.isBlank(value)) { - return Message.error("key or value cannot be empty"); + if (StringUtils.isBlank(configKey)) { + return Message.error("key cannot be empty"); + } + if (StringUtils.isNotBlank(user)) { + username = user; } List labelList = LabelEntityParser.generateUserCreatorEngineTypeLabelList( @@ -444,9 +532,22 @@ public Message saveKeyValue(HttpServletRequest req, @RequestBody Map json) throws ConfigurationException { - String username = ModuleUserUtils.getOperationUser(req, "saveKey"); + String username = ModuleUserUtils.getOperationUser(req, "deleteKeyValue"); String engineType = (String) json.getOrDefault("engineType", "*"); String version = (String) json.getOrDefault("version", "*"); String creator = (String) json.getOrDefault("creator", "*"); @@ -477,4 +578,226 @@ public Message deleteKeyValue(HttpServletRequest req, @RequestBody Map configValues = configKeyService.deleteConfigValue(configKey, labelList); return Message.ok().data("configValues", configValues); } + + @ApiOperation(value = "getBaseKeyValue", notes = "get key", response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam( + name = "engineType", + required = false, + dataType = "String", + value = "engineType"), + @ApiImplicitParam(name = "key", required = false, dataType = "String", value = "key"), + @ApiImplicitParam(name = "pageNow", required = false, dataType = "Integer", defaultValue = "1"), + @ApiImplicitParam( + name = "pageSize", + required = false, + dataType = "Integer", + defaultValue = "20"), + }) + @RequestMapping(path = "/baseKeyValue", method = RequestMethod.GET) + public Message getBaseKeyValue( + HttpServletRequest req, + @RequestParam(value = "engineType", required = false) String engineType, + @RequestParam(value = "key", required = false) String key, + @RequestParam(value = "pageNow", required = false, defaultValue = "1") Integer pageNow, + @RequestParam(value = "pageSize", required = false, defaultValue = "20") Integer pageSize) + throws ConfigurationException { + checkAdmin(ModuleUserUtils.getOperationUser(req, "getBaseKeyValue")); + if (StringUtils.isBlank(engineType)) { + engineType = null; + } + if (StringUtils.isBlank(key)) { + key = null; + } + PageHelper.startPage(pageNow, pageSize); + List list = null; + try { + list = configKeyService.getConfigBykey(engineType, key, req.getHeader("Content-Language")); + } finally { + PageHelper.clearPage(); + } + PageInfo pageInfo = new PageInfo<>(list); + long total = pageInfo.getTotal(); + return Message.ok().data("configKeyList", list).data("totalPage", total); + } + + @ApiOperation(value = "deleteBaseKeyValue", notes = "delete key", response = Message.class) + @ApiImplicitParams({@ApiImplicitParam(name = "id", required = true, dataType = "Integer")}) + @RequestMapping(path = "/baseKeyValue", method = RequestMethod.DELETE) + public Message deleteBaseKeyValue(HttpServletRequest req, @RequestParam(value = "id") Integer id) + throws ConfigurationException { + checkAdmin(ModuleUserUtils.getOperationUser(req, "deleteBaseKeyValue ID:" + id)); + configKeyService.deleteConfigById(id); + return Message.ok(); 
+ } + + @ApiOperation(value = "saveBaseKeyValue", notes = "save key", response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam(name = "id", required = false, dataType = "Integer", value = "id"), + @ApiImplicitParam(name = "key", required = true, dataType = "String", value = "key"), + @ApiImplicitParam(name = "name", required = true, dataType = "String", value = "name"), + @ApiImplicitParam( + name = "description", + required = true, + dataType = "String", + value = "description"), + @ApiImplicitParam( + name = "defaultValue", + required = true, + dataType = "String", + value = "defaultValue"), + @ApiImplicitParam( + name = "validateType", + required = true, + dataType = "String", + value = "validateType"), + @ApiImplicitParam( + name = "validateRange", + required = true, + dataType = "String", + value = "validateRange"), + @ApiImplicitParam( + name = "boundaryType", + required = true, + dataType = "String", + value = "boundaryType"), + @ApiImplicitParam(name = "treeName", required = true, dataType = "String", value = "treeName"), + @ApiImplicitParam( + name = "engineType", + required = true, + dataType = "String", + value = "engineType"), + @ApiImplicitParam(name = "enName", required = false, dataType = "String", value = "enName"), + @ApiImplicitParam( + name = "enDescription", + required = false, + dataType = "String", + value = "enDescription"), + @ApiImplicitParam( + name = "enTreeName", + required = false, + dataType = "String", + value = "enTreeName"), + @ApiImplicitParam( + name = "templateRequired", + required = false, + dataType = "String", + value = "1"), + }) + @ApiOperationSupport(ignoreParameters = {"json"}) + @RequestMapping(path = "/baseKeyValue", method = RequestMethod.POST) + public Message saveBaseKeyValue(HttpServletRequest req, @RequestBody ConfigKey configKey) + throws ConfigurationException, InstantiationException, IllegalAccessException { + checkAdmin(ModuleUserUtils.getOperationUser(req, "saveBaseKeyValue")); + String key = configKey.getKey(); + String name = configKey.getName(); + String treeName = configKey.getTreeName(); + String description = configKey.getDescription(); + Integer boundaryType = configKey.getBoundaryType(); + String defaultValue = configKey.getDefaultValue(); + String validateType = configKey.getValidateType(); + String validateRange = configKey.getValidateRange(); + String engineType = configKey.getEngineType(); + if (StringUtils.isBlank(key)) { + return Message.error("key cannot be empty"); + } + if (StringUtils.isBlank(name)) { + return Message.error("name cannot be empty"); + } + if (StringUtils.isBlank(description)) { + return Message.error("description cannot be empty"); + } + if (StringUtils.isBlank(treeName)) { + return Message.error("treeName cannot be empty"); + } + if (StringUtils.isBlank(validateType)) { + return Message.error("validateType cannot be empty"); + } + if (!validateType.equals("None") && StringUtils.isBlank(validateRange)) { + return Message.error("validateRange cannot be empty"); + } + if (null == boundaryType) { + return Message.error("boundaryType cannot be empty"); + } + if (StringUtils.isNotEmpty(defaultValue) + && !validatorManager + .getOrCreateValidator(validateType) + .validate(defaultValue, validateRange)) { + String msg = + MessageFormat.format( + "Parameter configValue verification failed(参数defaultValue校验失败):" + + "key:{0}, ValidateType:{1}, ValidateRange:{2},ConfigValue:{3}", + key, validateType, validateRange, defaultValue); + throw new ConfigurationException(msg); + } + if (null == 
configKey.getId()) { + List configBykey = + configKeyService.getConfigBykey(engineType, key, req.getHeader("Content-Language")); + if (CollectionUtils.isNotEmpty(configBykey)) { + return Message.error("The engine has the same key: " + key); + } + configKeyService.saveConfigKey(configKey); + } else { + configKey.setId(configKey.getId()); + configKeyService.updateConfigKey(configKey); + } + return Message.ok().data("configKey", configKey); + } + + @ApiOperation(value = "getUserkeyvalue", notes = "get key", response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam( + name = "engineType", + required = false, + dataType = "String", + value = "engineType"), + @ApiImplicitParam(name = "key", required = false, dataType = "String", value = "key"), + @ApiImplicitParam(name = "creator", required = false, dataType = "String", value = "creator"), + @ApiImplicitParam(name = "user", required = false, dataType = "String", value = "user"), + @ApiImplicitParam(name = "pageNow", required = false, dataType = "Integer", defaultValue = "1"), + @ApiImplicitParam( + name = "pageSize", + required = false, + dataType = "Integer", + defaultValue = "20"), + }) + @RequestMapping(path = "/userKeyValue", method = RequestMethod.GET) + public Message getUserKeyValue( + HttpServletRequest req, + @RequestParam(value = "engineType", required = false) String engineType, + @RequestParam(value = "key", required = false) String key, + @RequestParam(value = "creator", required = false) String creator, + @RequestParam(value = "user", required = false) String user, + @RequestParam(value = "pageNow", required = false, defaultValue = "1") Integer pageNow, + @RequestParam(value = "pageSize", required = false, defaultValue = "20") Integer pageSize) + throws ConfigurationException { + String username = ModuleUserUtils.getOperationUser(req, "getUserKeyValue"); + if (StringUtils.isBlank(engineType)) { + engineType = null; + } + if (StringUtils.isBlank(key)) { + key = null; + } + if (StringUtils.isBlank(creator)) { + creator = null; + } + if (StringUtils.isBlank(user)) { + user = null; + } + + if (!org.apache.linkis.common.conf.Configuration.isAdmin(username) && !username.equals(user)) { + return Message.error("Only admin can query other user configuration data"); + } + + PageHelper.startPage(pageNow, pageSize); + List list; + try { + list = configKeyService.getUserConfigValue(engineType, key, creator, user); + } finally { + PageHelper.clearPage(); + } + PageInfo pageInfo = new PageInfo<>(list); + long total = pageInfo.getTotal(); + return Message.ok().data("configValueList", list).data("totalPage", total); + } } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/TemplateManagerRestfulApi.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/TemplateManagerRestfulApi.java new file mode 100644 index 0000000000..7ed5c4e579 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/restful/api/TemplateManagerRestfulApi.java @@ -0,0 +1,277 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
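
The getBaseKeyValue and getUserKeyValue endpoints above both follow the same PageHelper idiom: start the page, run exactly one mapper query, and clear the ThreadLocal in a finally block so a failed query cannot leak pagination into an unrelated statement. The pattern in isolation (a generic sketch; the Supplier stands in for the MyBatis mapper call):

```java
import java.util.List;
import java.util.function.Supplier;

import com.github.pagehelper.PageHelper;
import com.github.pagehelper.PageInfo;

public class PageQueryExample {
  /** Generic shape of the pagination used by getBaseKeyValue/getUserKeyValue. */
  public static <T> PageInfo<T> pagedQuery(
      int pageNow, int pageSize, Supplier<List<T>> mapperCall) {
    // Registers the page bounds in a ThreadLocal picked up by the next MyBatis query.
    PageHelper.startPage(pageNow, pageSize);
    List<T> rows;
    try {
      rows = mapperCall.get();
    } finally {
      // Always clear, otherwise the ThreadLocal would apply to an unrelated query.
      PageHelper.clearPage();
    }
    return new PageInfo<>(rows); // exposes getTotal() for the "totalPage" field
  }
}
```
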
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.configuration.restful.api; + +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.common.utils.JsonUtils; +import org.apache.linkis.configuration.entity.ConfigKeyLimitVo; +import org.apache.linkis.configuration.exception.ConfigurationException; +import org.apache.linkis.configuration.service.TemplateConfigKeyService; +import org.apache.linkis.server.Message; +import org.apache.linkis.server.utils.ModuleUserUtils; + +import org.apache.commons.lang3.StringUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +import javax.servlet.http.HttpServletRequest; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.JsonNode; +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiImplicitParam; +import io.swagger.annotations.ApiImplicitParams; +import io.swagger.annotations.ApiOperation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Api(tags = "configuration template") +@RestController +@RequestMapping(path = "/configuration/template") +public class TemplateManagerRestfulApi { + + private static final Logger logger = LoggerFactory.getLogger(TemplateManagerRestfulApi.class); + + @Autowired private TemplateConfigKeyService templateConfigKeyService; + + @ApiOperation( + value = "updateKeyMapping", + notes = "update template key mapping", + response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam( + name = "templateUid", + dataType = "String", + required = true, + value = "templateUid"), + @ApiImplicitParam( + name = "templateName", + dataType = "String", + required = true, + value = "templateName"), + @ApiImplicitParam(name = "engineType", dataType = "String", required = true, value = "engineType"), + @ApiImplicitParam(name = "operator", dataType = "String", value = "operator"), + @ApiImplicitParam(name = "isFullMode", dataType = "Boolean", value = "isFullMode"), + @ApiImplicitParam(name = "itemList", dataType = "Array", value = "itemList"), + }) + @RequestMapping(path = "/updateKeyMapping", method = RequestMethod.POST) + public Message updateKeyMapping(HttpServletRequest req, @RequestBody JsonNode jsonNode) + throws ConfigurationException { + String username = ModuleUserUtils.getOperationUser(req, "updateKeyMapping"); + String token = ModuleUserUtils.getToken(req); + // check special admin token + if (StringUtils.isNotBlank(token)) { + if (!Configuration.isAdminToken(token)) { + logger.warn("Token:{} has no permission to updateKeyMapping.", token); + return Message.error("Token:" + token + " has no permission to updateKeyMapping."); + } + } else if (!Configuration.isAdmin(username)) { + logger.warn("User:{} has no permission to updateKeyMapping.",
username); + return Message.error("User:" + username + " has no permission to updateKeyMapping."); + } + + String templateUid = jsonNode.get("templateUid").asText(); + String templateName = jsonNode.get("templateName").asText(); + String engineType = jsonNode.get("engineType").asText(); + String operator = jsonNode.get("operator").asText(); + + if (StringUtils.isBlank(templateUid)) { + return Message.error("parameters:templateUid can not be empty(请求参数【templateUid】不能为空)"); + } + if (StringUtils.isBlank(templateName)) { + return Message.error("parameters:templateName can not be empty(请求参数【templateName】不能为空)"); + } + if (StringUtils.isBlank(engineType)) { + return Message.error("parameters:engineType can not be empty(请求参数【engineType】不能为空)"); + } + if (StringUtils.isBlank(operator)) { + return Message.error("parameters:operator can not be empty(请求参数【operator】不能为空)"); + } + boolean isFullMode = true; + try { + isFullMode = jsonNode.get("isFullMode").asBoolean(); + logger.info("will update by param isFullMode:" + isFullMode); + } catch (Exception e) { + logger.info("will update by default isFullMode:" + isFullMode); + } + + JsonNode itemParms = jsonNode.get("itemList"); + + List confKeyList = new ArrayList<>(); + if (itemParms != null && !itemParms.isNull()) { + try { + confKeyList = + JsonUtils.jackson() + .readValue(itemParms.toString(), new TypeReference>() {}); + } catch (JsonProcessingException e) { + return Message.error( + "parameters:itemList parsing failed(请求参数【itemList】解析失败), error with:" + e.getMessage()); + } + } else { + return Message.error("parameters:itemList can not be empty(请求参数【itemList】不能为空)"); + } + + logger.info( + "request parameters templateUid:{}, templateName:{}, engineType:{}, operator:{},isFullMode:{}, itemList:[{}]", + templateUid, + templateName, + engineType, + operator, + itemParms.asText()); + + templateConfigKeyService.updateKeyMapping( + templateUid, templateName, engineType, operator, confKeyList, isFullMode); + return Message.ok(); + } + + @ApiOperation(value = "queryKeyInfoList", notes = "query key info list", response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam(name = "templateUidList", dataType = "Array", value = "templateUidList"), + }) + @RequestMapping(path = "/queryKeyInfoList", method = RequestMethod.POST) + public Message queryKeyInfoList(HttpServletRequest req, @RequestBody JsonNode jsonNode) + throws ConfigurationException { + String username = ModuleUserUtils.getOperationUser(req, "queryKeyInfoList"); + String token = ModuleUserUtils.getToken(req); + // check special admin token + if (StringUtils.isNotBlank(token)) { + if (!Configuration.isAdminToken(token)) { + logger.warn("Token:{} has no permission to queryKeyInfoList.", token); + return Message.error("Token:" + token + " has no permission to queryKeyInfoList."); + } + } else if (!Configuration.isAdmin(username)) { + logger.warn("User:{} has no permission to queryKeyInfoList.", username); + return Message.error("User:" + username + " has no permission to queryKeyInfoList."); + } + + JsonNode templateUidListParms = jsonNode.get("templateUidList"); + + List uuidList = new ArrayList<>(); + if (templateUidListParms != null && !templateUidListParms.isNull()) { + try { + uuidList = + JsonUtils.jackson() + .readValue(templateUidListParms.toString(), new TypeReference>() {}); + } catch (JsonProcessingException e) { + return Message.error( + "parameters:templateUidList parsing failed(请求参数【templateUidList】解析失败), error with:" + + e.getMessage()); + } + } else { + return Message.error( + 
"parameters:templateUidList can not be empty(请求参数【templateUidList】不能为空)"); + } + + List result = templateConfigKeyService.queryKeyInfoList(uuidList); + + return Message.ok().data("list", result); + } + + @ApiOperation(value = "apply", notes = "apply conf template rule", response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam( + name = "templateUid", + dataType = "String", + required = true, + value = "templateUid"), + @ApiImplicitParam(name = "application", dataType = "String", value = "application"), + @ApiImplicitParam(name = "engineType", dataType = "String", value = "engineType"), + @ApiImplicitParam(name = "engineVersion", dataType = "String", value = "engineVersion"), + @ApiImplicitParam(name = "operator", dataType = "String", value = "operator"), + @ApiImplicitParam(name = "userList", dataType = "Array", value = "userList"), + }) + @RequestMapping(path = "/apply", method = RequestMethod.POST) + public Message apply(HttpServletRequest req, @RequestBody JsonNode jsonNode) + throws ConfigurationException { + String username = ModuleUserUtils.getOperationUser(req, "apply"); + String token = ModuleUserUtils.getToken(req); + // check special admin token + if (StringUtils.isNotBlank(token)) { + if (!Configuration.isAdminToken(token)) { + logger.warn("Token:{} has no permission to apply.", token); + return Message.error("Token:" + token + " has no permission to apply."); + } + } else if (!Configuration.isAdmin(username)) { + logger.warn("User:{} has no permission to apply.", username); + return Message.error("User:" + username + " has no permission to apply."); + } + + String templateUid = jsonNode.get("templateUid").asText(); + String application = jsonNode.get("application").asText(); + String engineType = jsonNode.get("engineType").asText(); + String engineVersion = jsonNode.get("engineVersion").asText(); + String operator = jsonNode.get("operator").asText(); + + if (StringUtils.isBlank(templateUid)) { + return Message.error("parameters:templateUid can not be empty(请求参数【templateUid】不能为空)"); + } + if (StringUtils.isBlank(application)) { + return Message.error("parameters:application can not be empty(请求参数【application】不能为空)"); + } + if (StringUtils.isBlank(engineType)) { + return Message.error("parameters:engineType can not be empty(请求参数【engineType】不能为空)"); + } + if (StringUtils.isBlank(engineVersion)) { + return Message.error("parameters:engineVersion can not be empty(请求参数【engineVersion】不能为空)"); + } + if (StringUtils.isBlank(operator)) { + return Message.error("parameters:operator can not be empty(请求参数【operator】不能为空)"); + } + + JsonNode userParms = jsonNode.get("userList"); + List userList = new ArrayList<>(); + if (userParms != null && !userParms.isNull()) { + try { + userList = + JsonUtils.jackson() + .readValue(userParms.toString(), new TypeReference>() {}); + } catch (JsonProcessingException e) { + return Message.error( + "parameters:userList parsing failed(请求参数【userList】解析失败), error with:" + e.getMessage()); + } + } else { + return Message.error("parameters:userList can not be empty(请求参数【userList】不能为空)"); + } + + logger.info( + "request parameters templateUid:{}, application:{}, engineType:{}, engineVersion:{}, operator:{},userList:[{}]", + templateUid, + application, + engineType, + engineVersion, + operator, + String.join(",", userList)); + + Map result = + templateConfigKeyService.apply( + templateUid, application, engineType, engineVersion, operator, userList); + + Message message = Message.ok(); + message.getData().putAll(result); + return message; + } +} diff --git 
a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/AcrossClusterRuleService.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/AcrossClusterRuleService.java new file mode 100644 index 0000000000..2fff11c871 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/AcrossClusterRuleService.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.configuration.service; + +import org.apache.linkis.configuration.entity.AcrossClusterRule; + +import java.util.Map; + +public interface AcrossClusterRuleService { + + void deleteAcrossClusterRule(String creator, String username) throws Exception; + + void updateAcrossClusterRule(AcrossClusterRule acrossClusterRule) throws Exception; + + void insertAcrossClusterRule(AcrossClusterRule acrossClusterRule) throws Exception; + + Map queryAcrossClusterRuleList( + String creator, String username, String clusterName, Integer pageNow, Integer pageSize) + throws Exception; + + void validAcrossClusterRule(Long id, String isValid) throws Exception; +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/ConfigKeyService.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/ConfigKeyService.java index 665f359483..758ac9e91d 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/ConfigKeyService.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/ConfigKeyService.java @@ -17,7 +17,9 @@ package org.apache.linkis.configuration.service; +import org.apache.linkis.configuration.entity.ConfigKey; import org.apache.linkis.configuration.entity.ConfigKeyValue; +import org.apache.linkis.configuration.entity.ConfigUserValue; import org.apache.linkis.configuration.entity.ConfigValue; import org.apache.linkis.configuration.exception.ConfigurationException; import org.apache.linkis.manager.label.entity.Label; @@ -32,6 +34,19 @@ ConfigValue saveConfigValue(ConfigKeyValue configKeyValue, List> labelL List getConfigValue(String configKey, List> labelList) throws ConfigurationException; + List getConfigKeyList(String engineType) throws ConfigurationException; + List deleteConfigValue(String configKey, List> labelList) throws ConfigurationException; + + List getConfigBykey(String engineType, String key, String language); + + void deleteConfigById(Integer id); + + ConfigKey saveConfigKey(ConfigKey configKey); + + List getUserConfigValue( + String engineType, String key, String creator, 
String user); + + void updateConfigKey(ConfigKey configKey); } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/TemplateConfigKeyService.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/TemplateConfigKeyService.java new file mode 100644 index 0000000000..bde686c6d0 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/TemplateConfigKeyService.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.configuration.service; + +import org.apache.linkis.configuration.entity.ConfigKeyLimitVo; +import org.apache.linkis.configuration.exception.ConfigurationException; +import org.apache.linkis.governance.common.protocol.conf.TemplateConfRequest; +import org.apache.linkis.governance.common.protocol.conf.TemplateConfResponse; + +import java.util.List; +import java.util.Map; + +public interface TemplateConfigKeyService { + + Boolean updateKeyMapping( + String templateUid, + String templateName, + String engineType, + String operator, + List itemList, + Boolean isFullMode) + throws ConfigurationException; + + List queryKeyInfoList(List uuidList) throws ConfigurationException; + + Map apply( + String templateUid, + String application, + String engineType, + String engineVersion, + String operator, + List userList) + throws ConfigurationException; + + TemplateConfResponse queryKeyInfoList(TemplateConfRequest templateConfRequest); +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/AcrossClusterRuleServiceImpl.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/AcrossClusterRuleServiceImpl.java new file mode 100644 index 0000000000..a906ca2d1a --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/AcrossClusterRuleServiceImpl.java @@ -0,0 +1,103 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
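
For the TemplateManagerRestfulApi shown earlier, updateKeyMapping expects a JSON body with templateUid, templateName, engineType, operator, an optional isFullMode flag (defaulting to true) and a non-empty itemList that is parsed into List<ConfigKeyLimitVo>. A hedged client-side sketch that assembles such a body with Jackson (all values, and the item field names key/configValue, are illustrative assumptions):

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class UpdateKeyMappingPayload {
  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper();
    // Field names follow TemplateManagerRestfulApi#updateKeyMapping; values are made up.
    ObjectNode body = mapper.createObjectNode();
    body.put("templateUid", "example-uuid");     // third-party template uuid, varchar(34)
    body.put("templateName", "demo-template");
    body.put("engineType", "spark-2.4.3");
    body.put("operator", "hadoop");
    body.put("isFullMode", false);               // optional, defaults to true when absent
    ArrayNode items = body.putArray("itemList"); // parsed into List<ConfigKeyLimitVo>
    ObjectNode item = items.addObject();
    item.put("key", "spark.executor.memory");    // assumed ConfigKeyLimitVo field name
    item.put("configValue", "4g");               // assumed ConfigKeyLimitVo field name
    // Print the request body for POST /configuration/template/updateKeyMapping.
    System.out.println(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(body));
  }
}
```
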
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.configuration.service.impl; + +import org.apache.linkis.configuration.dao.AcrossClusterRuleMapper; +import org.apache.linkis.configuration.entity.AcrossClusterRule; +import org.apache.linkis.configuration.service.AcrossClusterRuleService; +import org.apache.linkis.governance.common.constant.job.JobRequestConstants; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import java.util.*; + +import com.github.pagehelper.PageHelper; +import com.github.pagehelper.PageInfo; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Service +public class AcrossClusterRuleServiceImpl implements AcrossClusterRuleService { + + private final Logger logger = LoggerFactory.getLogger(this.getClass()); + @Autowired private AcrossClusterRuleMapper ruleMapper; + + @Override + public void deleteAcrossClusterRule(String creator, String username) throws Exception { + ruleMapper.deleteAcrossClusterRule(creator, username); + } + + @Override + public void updateAcrossClusterRule(AcrossClusterRule newRule) throws Exception { + AcrossClusterRule beforeRule = ruleMapper.getAcrossClusterRule(newRule.getId()); + if (beforeRule == null) { + throw new Exception("acrossClusterRule does not exist"); + } + + Date time = new Date(); + newRule.setCreateBy(beforeRule.getCreateBy()); + newRule.setCreateTime(beforeRule.getCreateTime()); + newRule.setUpdateTime(time); + + ruleMapper.updateAcrossClusterRule(newRule); + } + + @Override + public void insertAcrossClusterRule(AcrossClusterRule acrossClusterRule) throws Exception { + Date time = new Date(); + acrossClusterRule.setCreateTime(time); + acrossClusterRule.setUpdateTime(time); + ruleMapper.insertAcrossClusterRule(acrossClusterRule); + } + + @Override + public Map<String, Object> queryAcrossClusterRuleList( + String creator, String username, String clusterName, Integer pageNow, Integer pageSize) { + Map<String, Object> result = new HashMap<>(2); + List<AcrossClusterRule> acrossClusterRules = null; + if (Objects.isNull(pageNow)) { + pageNow = 1; + } + if (Objects.isNull(pageSize)) { + pageSize = 20; + } + PageHelper.startPage(pageNow, pageSize); + + try { + acrossClusterRules = ruleMapper.queryAcrossClusterRuleList(username, creator, clusterName); + } finally { + PageHelper.clearPage(); + } + PageInfo<AcrossClusterRule> pageInfo = new PageInfo<>(acrossClusterRules); + result.put("acrossClusterRuleList", acrossClusterRules); + result.put(JobRequestConstants.TOTAL_PAGE(), pageInfo.getTotal()); + return result; + } + + @Override + public void validAcrossClusterRule(Long id, String isValid) throws Exception { + AcrossClusterRule beforeRule = ruleMapper.getAcrossClusterRule(id); + + if (beforeRule == null) { + throw new Exception("acrossClusterRule does not exist"); + } + + ruleMapper.validAcrossClusterRule(isValid, id); + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/ConfigKeyServiceImpl.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/ConfigKeyServiceImpl.java index 6811b5e7e2..0747afc57b 100644 ---
a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/ConfigKeyServiceImpl.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/ConfigKeyServiceImpl.java @@ -19,10 +19,7 @@ import org.apache.linkis.configuration.dao.ConfigMapper; import org.apache.linkis.configuration.dao.LabelMapper; -import org.apache.linkis.configuration.entity.ConfigKey; -import org.apache.linkis.configuration.entity.ConfigKeyValue; -import org.apache.linkis.configuration.entity.ConfigLabel; -import org.apache.linkis.configuration.entity.ConfigValue; +import org.apache.linkis.configuration.entity.*; import org.apache.linkis.configuration.exception.ConfigurationException; import org.apache.linkis.configuration.service.ConfigKeyService; import org.apache.linkis.configuration.util.LabelEntityParser; @@ -63,9 +60,8 @@ public class ConfigKeyServiceImpl implements ConfigKeyService { public ConfigValue saveConfigValue(ConfigKeyValue configKeyValue, List<Label<?>> labelList) throws ConfigurationException { - if (StringUtils.isBlank(configKeyValue.getConfigValue()) - || StringUtils.isBlank(configKeyValue.getKey())) { - throw new ConfigurationException(KEY_OR_VALUE_CANNOT.getErrorDesc()); + if (StringUtils.isBlank(configKeyValue.getKey())) { + throw new ConfigurationException(KEY_CANNOT_EMPTY.getErrorDesc()); } LabelParameterParser.labelCheck(labelList); @@ -163,6 +159,11 @@ public List<ConfigValue> getConfigValue(String key, List<Label<?>> labelList) return configValues; } + @Override + public List<ConfigKey> getConfigKeyList(String engineType) throws ConfigurationException { + return configMapper.selectKeyByEngineType(engineType); + } + @Override public List<ConfigValue> deleteConfigValue(String key, List<Label<?>> labelList) throws ConfigurationException { @@ -174,4 +175,37 @@ public List<ConfigValue> deleteConfigValue(String key, List<Label<?>> labelList) logger.info("succeed to remove key: {} by label:{} ", key, combinedLabel.getStringValue()); return configValues; } + + @Override + public List<ConfigKey> getConfigBykey(String engineType, String key, String language) { + List<ConfigKey> configkeyList; + if ("en".equals(language)) { + configkeyList = configMapper.getConfigEnBykey(engineType, key); + } else { + configkeyList = configMapper.getConfigBykey(engineType, key); + } + return configkeyList; + } + + @Override + public void deleteConfigById(Integer id) { + configMapper.deleteConfigKey(id); + } + + @Override + public ConfigKey saveConfigKey(ConfigKey configKey) { + configMapper.insertKeyByBase(configKey); + return null; + } + + @Override + public List<ConfigUserValue> getUserConfigValue( + String engineType, String key, String creator, String user) { + return configMapper.getUserConfigValue(key, user, creator, engineType); + } + + @Override + public void updateConfigKey(ConfigKey configKey) { + configMapper.updateConfigKey(configKey); + } } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/TemplateConfigKeyServiceImpl.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/TemplateConfigKeyServiceImpl.java new file mode 100644 index 0000000000..cc10066a10 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/TemplateConfigKeyServiceImpl.java @@ -0,0 +1,500 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements.
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.configuration.service.impl; + +import org.apache.linkis.configuration.dao.ConfigKeyLimitForUserMapper; +import org.apache.linkis.configuration.dao.ConfigMapper; +import org.apache.linkis.configuration.dao.LabelMapper; +import org.apache.linkis.configuration.dao.TemplateConfigKeyMapper; +import org.apache.linkis.configuration.entity.*; +import org.apache.linkis.configuration.enumeration.BoundaryTypeEnum; +import org.apache.linkis.configuration.exception.ConfigurationException; +import org.apache.linkis.configuration.service.ConfigKeyService; +import org.apache.linkis.configuration.service.ConfigurationService; +import org.apache.linkis.configuration.service.TemplateConfigKeyService; +import org.apache.linkis.configuration.util.LabelEntityParser; +import org.apache.linkis.configuration.validate.ValidatorManager; +import org.apache.linkis.governance.common.entity.TemplateConfKey; +import org.apache.linkis.governance.common.protocol.conf.TemplateConfRequest; +import org.apache.linkis.governance.common.protocol.conf.TemplateConfResponse; +import org.apache.linkis.manager.label.entity.CombinedLabel; +import org.apache.linkis.rpc.message.annotation.Receiver; + +import org.apache.commons.lang3.StringUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.TransactionStatus; +import org.springframework.transaction.annotation.Transactional; +import org.springframework.transaction.support.DefaultTransactionDefinition; + +import java.text.MessageFormat; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Service +public class TemplateConfigKeyServiceImpl implements TemplateConfigKeyService { + + private static final Logger logger = LoggerFactory.getLogger(TemplateConfigKeyServiceImpl.class); + + @Autowired private ConfigMapper configMapper; + + @Autowired private LabelMapper labelMapper; + + @Autowired private TemplateConfigKeyMapper templateConfigKeyMapper; + + @Autowired private ConfigurationService configurationService; + + @Autowired private ConfigKeyService configKeyService; + + @Autowired private ValidatorManager validatorManager; + + @Autowired private ConfigKeyLimitForUserMapper configKeyLimitForUserMapper; + + @Autowired private PlatformTransactionManager platformTransactionManager; + + @Override + @Transactional + public Boolean updateKeyMapping( + String templateUid, + String templateName, + String engineType, + String operator, + List<ConfigKeyLimitVo> itemList, + Boolean isFullMode) + throws ConfigurationException { + + // Query the corresponding
data and check the validity of the data(查询对应的数据 并做数据合法性检查) + List<String> keyList = itemList.stream().map(e -> e.getKey()).collect(Collectors.toList()); + List<ConfigKey> configKeyList = + configMapper.selectKeyByEngineTypeAndKeyList(engineType, keyList); + // List of key ids to be updated(待更新的key id 列表) + List<Long> keyIdList = configKeyList.stream().map(e -> e.getId()).collect(Collectors.toList()); + if (configKeyList.size() != itemList.size()) { + List<String> dbKeyList = + configKeyList.stream().map(e -> e.getKey()).collect(Collectors.toList()); + String msg = + MessageFormat.format( + "The number of configuration items fetched from the DB is inconsistent with the input(从DB中获取到的配置数据条数不一致):" + "engineType:{0}, input keys:{1}, db keys:{2}", + engineType, String.join(",", keyList), String.join(",", dbKeyList)); + throw new ConfigurationException(msg); + } + + List<TemplateConfigKey> toUpdateOrInsertList = new ArrayList<>(); + + // map k:v---> key:ConfigKey + Map<String, ConfigKey> configKeyMap = + configKeyList.stream().collect(Collectors.toMap(ConfigKey::getKey, item -> item)); + for (ConfigKeyLimitVo item : itemList) { + + String key = item.getKey(); + ConfigKey temp = configKeyMap.get(item.getKey()); + String validateType = temp.getValidateType(); + String validateRange = temp.getValidateRange(); + String configValue = item.getConfigValue(); + String maxValue = item.getMaxValue(); + + if (StringUtils.isNotEmpty(configValue) + && !validatorManager + .getOrCreateValidator(validateType) + .validate(configValue, validateRange)) { + String msg = + MessageFormat.format( + "Parameter configValue verification failed(参数configValue校验失败):" + "key:{0}, ValidateType:{1}, ValidateRange:{2}, ConfigValue:{3}", + key, validateType, validateRange, configValue); + throw new ConfigurationException(msg); + } + + if (StringUtils.isNotEmpty(maxValue) + && BoundaryTypeEnum.WITH_BOTH.getId().equals(temp.getBoundaryType())) { + if (!validatorManager + .getOrCreateValidator(validateType) + .validate(maxValue, validateRange)) { + String msg = + MessageFormat.format( + "Parameter maxValue verification failed(参数maxValue校验失败):" + "key:{0}, ValidateType:{1}, ValidateRange:{2}, maxValue:{3}", + key, validateType, validateRange, maxValue); + throw new ConfigurationException(msg); + } + + try { + Integer maxVal = Integer.valueOf(maxValue.replaceAll("[^0-9]", "")); + Integer configVal = Integer.valueOf(configValue.replaceAll("[^0-9]", "")); + if (configVal > maxVal) { + String msg = + MessageFormat.format( + "Parameter key:{0}, config value:{1} verification failed, " + "exceeds the specified max value: {2}(参数校验失败,超过指定的最大值):", + key, configVal, maxVal); + throw new ConfigurationException(msg); + } + } catch (Exception exception) { + if (exception instanceof ConfigurationException) { + throw exception; + } else { + logger.warn( + "Failed to check special limit setting for key:" + + key + + ", config value:" + + configValue); + } + } + } + + Long keyId = temp.getId(); + + TemplateConfigKey templateConfigKey = new TemplateConfigKey(); + templateConfigKey.setTemplateName(templateName); + templateConfigKey.setTemplateUuid(templateUid); + templateConfigKey.setKeyId(keyId); + templateConfigKey.setConfigValue(configValue); + templateConfigKey.setMaxValue(maxValue); + templateConfigKey.setCreateBy(operator); + templateConfigKey.setUpdateBy(operator); + toUpdateOrInsertList.add(templateConfigKey); + } + // Update data according to the mode (full or incremental) + if (isFullMode) { + // The data previously in the database needs to be removed + List<TemplateConfigKey> oldList =
templateConfigKeyMapper.selectListByTemplateUuid(templateUid); + List<Long> needToRemoveList = + oldList.stream() + .filter( + item -> { + return !keyIdList.contains(item.getKeyId()); + }) + .map(e -> e.getKeyId()) + .collect(Collectors.toList()); + if (needToRemoveList.size() > 0) { + logger.info( + "Try to remove old data:[" + needToRemoveList + "] for templateUid:" + templateUid); + templateConfigKeyMapper.deleteByTemplateUuidAndKeyIdList(templateUid, needToRemoveList); + } + } + + if (toUpdateOrInsertList.size() == 0) { + String msg = "No key data to update. Please check if the keys are correct"; + throw new ConfigurationException(msg); + } + templateConfigKeyMapper.batchInsertOrUpdateList(toUpdateOrInsertList); + + return true; + } + + @Override + public List<Object> queryKeyInfoList(List<String> uuidList) throws ConfigurationException { + List<Object> result = new ArrayList<>(); + + List<TemplateConfigKey> templateConfigKeyList = + templateConfigKeyMapper.selectListByTemplateUuidList(uuidList); + + Map<String, List<TemplateConfigKey>> templateConfigKeyListGroupByUuid = + templateConfigKeyList.stream() + .collect(Collectors.groupingBy(TemplateConfigKey::getTemplateUuid)); + + List<Long> keyIdList = + templateConfigKeyList.stream() + .map(e -> e.getKeyId()) + .distinct() + .collect(Collectors.toList()); + + if (keyIdList.size() == 0) { + String msg = "cannot get any config key info from the db. Please check if the keys are correct"; + throw new ConfigurationException(msg); + } + List<ConfigKey> configKeyList = configMapper.selectKeyByKeyIdList(keyIdList); + // map k:v---> keyId:ConfigKey + Map<Long, ConfigKey> configKeyMap = + configKeyList.stream().collect(Collectors.toMap(ConfigKey::getId, item -> item)); + + for (String uuid : templateConfigKeyListGroupByUuid.keySet()) { + Map<String, Object> item = new HashMap<>(); + List<Map<String, Object>> keys = new ArrayList<>(); + item.put("templateUid", uuid); + + String engineType = ""; + List<String> engineTypeList = templateConfigKeyMapper.selectEngineTypeByTemplateUuid(uuid); + + if (engineTypeList.size() > 1) { + String msg = + MessageFormat.format( + "template uuid:{0} is associated with more than one engine type:{1}! Please check if the keys are correct", + uuid, StringUtils.join(engineTypeList.toArray(), ",")); + throw new ConfigurationException(msg); + } + + if (engineTypeList.size() == 0) { + String msg = + MessageFormat.format( + "template uuid:{0} is not associated with any engine type!
Please check if the keys are correct", + uuid); + throw new ConfigurationException(msg); + } + + engineType = engineTypeList.get(0); + + Map<Long, TemplateConfigKey> templateConfigKeyMap = + templateConfigKeyListGroupByUuid.get(uuid).stream() + .collect(Collectors.toMap(TemplateConfigKey::getKeyId, elem -> elem)); + + List<ConfigKey> ecKeyList = configKeyService.getConfigKeyList(engineType); + for (ConfigKey configKey : ecKeyList) { + Map<String, Object> temp = new HashMap<>(); + temp.put("key", configKey.getKey()); + temp.put("name", configKey.getName()); + temp.put("description", configKey.getDescription()); + temp.put("engineType", configKey.getEngineType()); + temp.put("validateType", configKey.getValidateType()); + temp.put("validateRange", configKey.getValidateRange()); + temp.put("boundaryType", configKey.getBoundaryType()); + temp.put("defaultValue", configKey.getDefaultValue()); + temp.put("require", configKey.getTemplateRequired()); + temp.put("keyId", configKey.getId()); + + Long keyId = configKey.getId(); + TemplateConfigKey templateConfigKey = templateConfigKeyMap.get(keyId); + + if (templateConfigKey == null) { + temp.put("configValue", null); + temp.put("maxValue", null); + temp.put("createBy", null); + temp.put("createTime", null); + temp.put("updateBy", null); + temp.put("updateTime", null); + } else { + temp.put("configValue", templateConfigKey.getConfigValue()); + temp.put("maxValue", templateConfigKey.getMaxValue()); + temp.put("createBy", templateConfigKey.getCreateBy()); + temp.put("createTime", templateConfigKey.getCreateTime()); + temp.put("updateBy", templateConfigKey.getUpdateBy()); + temp.put("updateTime", templateConfigKey.getUpdateTime()); + } + + keys.add(temp); + } + + item.put("itemList", keys); + result.add(item); + } + return result; + } + + @Override + public Map<String, Object> apply( + String templateUid, + String application, + String engineType, + String engineVersion, + String operator, + List<String> userList) + throws ConfigurationException { + List<Map<String, Object>> successList = new ArrayList<>(); + List<Map<String, Object>> errorList = new ArrayList<>(); + + // get the config item list associated with the template + List<String> templateUuidList = new ArrayList<>(); + templateUuidList.add(templateUid); + List<TemplateConfigKey> templateConfigKeyList = + templateConfigKeyMapper.selectListByTemplateUuidList(templateUuidList); + if (templateConfigKeyList.size() == 0) { + String msg = + MessageFormat.format( + "The template configuration is empty.
Please check the template associated configuration information in the database table" + "(模板关联的配置为空,请检查数据库表中关于模板id:{0} 关联配置项是否完整)", + templateUid); + throw new ConfigurationException(msg); + } + // check that the input engineType matches the engineType of the template keys + List<Long> keyIdList = + templateConfigKeyList.stream() + .map(e -> e.getKeyId()) + .distinct() + .collect(Collectors.toList()); + + if (keyIdList.size() == 0) { + String msg = "cannot get any config key info from the db. Please check if the keys are correct"; + throw new ConfigurationException(msg); + } + List<ConfigKey> configKeyList = configMapper.selectKeyByKeyIdList(keyIdList); + // collect the engine types referenced by the template keys + Set<String> configKeyEngineTypeSet = + configKeyList.stream().map(ConfigKey::getEngineType).collect(Collectors.toSet()); + + if (configKeyEngineTypeSet == null || configKeyEngineTypeSet.size() == 0) { + String msg = + MessageFormat.format( + "Unable to get configuration parameter information associated with template id:{0}, please check whether the parameters are correct" + "(无法获取模板:{0} 关联的配置参数信息,请检查参数是否正确)", + templateUid); + throw new ConfigurationException(msg); + } + + if (configKeyEngineTypeSet.size() != 1 || !configKeyEngineTypeSet.contains(engineType)) { + String msg = + MessageFormat.format( + "The engineType:{0} associated with the template:{1} does not match the input engineType:{2}, please check whether the parameters are correct" + "(模板关联的引擎类型:{0} 和下发的引擎类型:{2} 不匹配,请检查参数是否正确)", + String.join(",", configKeyEngineTypeSet), templateUid, engineType); + throw new ConfigurationException(msg); + } + for (String user : userList) { + // try to create combined_userCreator_engineType label for user + Map<String, Object> res = new HashMap<>(); + res.put("user", user); + try { + CombinedLabel combinedLabel = + configurationService.generateCombinedLabel( + engineType, engineVersion, user, application); + String combinedLabelKey = combinedLabel.getLabelKey(); + String combinedLabelStringValue = combinedLabel.getStringValue(); + // check that the label exists and is valid + + ConfigLabel configLabel = + labelMapper.getLabelByKeyValue(combinedLabelKey, combinedLabelStringValue); + if (null == configLabel || configLabel.getId() < 0) { + configLabel = LabelEntityParser.parseToConfigLabel(combinedLabel); + labelMapper.insertLabel(configLabel); + logger.info("succeed to create label: {}", configLabel.getStringValue()); + } + + // batch update config value + List<ConfigValue> configValues = new ArrayList<>(); + + List<ConfigKeyLimitForUser> configKeyLimitForUsers = new ArrayList<>(); + + for (TemplateConfigKey templateConfigKey : templateConfigKeyList) { + Long keyId = templateConfigKey.getKeyId(); + String uuid = templateConfigKey.getTemplateUuid(); + String confVal = templateConfigKey.getConfigValue(); + String maxVal = templateConfigKey.getMaxValue(); + + ConfigValue configValue = new ConfigValue(); + configValue.setConfigKeyId(keyId); + configValue.setConfigValue(confVal); + configValue.setConfigLabelId(configLabel.getId()); + configValues.add(configValue); + + ConfigKeyLimitForUser configKeyLimitForUser = new ConfigKeyLimitForUser(); + configKeyLimitForUser.setUserName(user); + configKeyLimitForUser.setCombinedLabelValue(configLabel.getStringValue()); + configKeyLimitForUser.setKeyId(keyId); + configKeyLimitForUser.setConfigValue(confVal); + configKeyLimitForUser.setMaxValue(maxVal); + configKeyLimitForUser.setLatestUpdateTemplateUuid(uuid); + configKeyLimitForUser.setCreateBy(operator); + configKeyLimitForUser.setUpdateBy(operator); + configKeyLimitForUsers.add(configKeyLimitForUser); + } + + if (configValues.size() == 0) { +
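// none of the template keys produced a config value for this user, so record an error entry and continue with the next user +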
res.put("msg", "can not get any right key form the db"); + errorList.add(res); + } else { + + DefaultTransactionDefinition transactionDefinition = new DefaultTransactionDefinition(); + TransactionStatus status = + platformTransactionManager.getTransaction(transactionDefinition); + try { + configMapper.batchInsertOrUpdateValueList(configValues); + // batch update user ConfigKeyLimitForUserMapper + configKeyLimitForUserMapper.batchInsertOrUpdateList(configKeyLimitForUsers); + + platformTransactionManager.commit(status); // commit transaction if everything's fine + } catch (Exception ex) { + platformTransactionManager.rollback( + status); // rollback transaction if any error occurred + throw ex; + } + successList.add(res); + } + + } catch (Exception e) { + logger.warn("try to update configurations for user:" + user + " with error", e); + res.put("msg", e.getMessage()); + errorList.add(res); + } + } + + Map result = new HashMap<>(); + + Map successResult = new HashMap<>(); + Map errorResult = new HashMap<>(); + + successResult.put("num", successList.size()); + successResult.put("infoList", successList); + + errorResult.put("num", errorList.size()); + errorResult.put("infoList", errorList); + + result.put("success", successResult); + result.put("error", errorResult); + return result; + } + + @Receiver + @Override + public TemplateConfResponse queryKeyInfoList(TemplateConfRequest templateConfRequest) { + TemplateConfResponse result = new TemplateConfResponse(); + String templateUid = templateConfRequest.getTemplateUuid(); + String templateName = templateConfRequest.getTemplateName(); + if (logger.isDebugEnabled()) { + logger.debug("query conf list with uid:{},name:{}", templateUid, templateName); + } + if (StringUtils.isBlank(templateUid) && StringUtils.isBlank(templateName)) { + return result; + } + + List voList = new ArrayList<>(); + + if (StringUtils.isNotBlank(templateUid)) { + voList = templateConfigKeyMapper.selectInfoListByTemplateUuid(templateUid); + + } else { + voList = templateConfigKeyMapper.selectInfoListByTemplateName(templateName); + } + List data = new ArrayList<>(); + if (voList != null) { + for (TemplateConfigKeyVO temp : voList) { + TemplateConfKey item = new TemplateConfKey(); + item.setTemplateUuid(temp.getTemplateUuid()); + item.setKey(temp.getKey()); + item.setTemplateName(temp.getTemplateName()); + item.setConfigValue(temp.getConfigValue()); + data.add(item); + if (logger.isDebugEnabled()) { + logger.debug("query conf item={}", item); + } + } + } + result.setList(data); + return result; + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/TenantConfigServiceImpl.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/TenantConfigServiceImpl.java index bf9755a307..df64521ad4 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/TenantConfigServiceImpl.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/service/impl/TenantConfigServiceImpl.java @@ -124,35 +124,35 @@ public void createTenant(TenantVo tenantVo) throws ConfigurationException { } private void dataProcessing(TenantVo tenantVo) throws ConfigurationException { + AtomicReference tenantResult = new AtomicReference<>(false); + // Obtain the tenant information of the ECM list + Map ecmListResult = null; + try { + ecmListResult = HttpsUtil.sendHttp(null, null); + 
logger.info("Request ecm list response {}:", ecmListResult); + } catch (IOException e) { + logger.warn("failed to get ecmResource data"); + } + Map>> data = MapUtils.getMap(ecmListResult, "data"); + List> emNodeVoList = data.get("EMs"); + // Compare ECM list tenant labels for task + emNodeVoList.forEach( + ecmInfo -> { + List> labels = (List>) ecmInfo.get("labels"); + labels.stream() + .filter(labelmap -> labelmap.containsKey("tenant")) + .forEach( + map -> { + String tenant = map.get("tenant").toString().toLowerCase(); + if (tenant.equals(tenantVo.getTenantValue().toLowerCase())) { + tenantResult.set(true); + } + }); + }); + // Compare the value of ecm tenant + if (!tenantResult.get()) + throw new ConfigurationException("The ECM with the corresponding label was not found"); if (!tenantVo.getCreator().equals("*")) { - AtomicReference tenantResult = new AtomicReference<>(false); - // Obtain the tenant information of the ECM list - Map ecmListResult = null; - try { - ecmListResult = HttpsUtil.sendHttp(null, null); - logger.info("Request ecm list response {}:", ecmListResult); - } catch (IOException e) { - logger.warn("failed to get ecmResource data"); - } - Map>> data = MapUtils.getMap(ecmListResult, "data"); - List> emNodeVoList = data.get("EMs"); - // Compare ECM list tenant labels for task - emNodeVoList.forEach( - ecmInfo -> { - List> labels = (List>) ecmInfo.get("labels"); - labels.stream() - .filter(labelmap -> labelmap.containsKey("tenant")) - .forEach( - map -> { - String tenant = map.get("tenant").toString().toLowerCase(); - if (tenant.equals(tenantVo.getTenantValue().toLowerCase())) { - tenantResult.set(true); - } - }); - }); - // Compare the value of ecm tenant - if (!tenantResult.get()) - throw new ConfigurationException("The ECM with the corresponding label was not found"); // The beginning of tenantValue needs to contain creator String creator = tenantVo.getCreator().toLowerCase(); String[] tenantArray = tenantVo.getTenantValue().toLowerCase().split("_"); diff --git a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/util/CommonUtils.java b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/util/CommonUtils.java index ed80f09a0a..2d3f9b2008 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/util/CommonUtils.java +++ b/linkis-public-enhancements/linkis-configuration/src/main/java/org/apache/linkis/configuration/util/CommonUtils.java @@ -17,6 +17,16 @@ package org.apache.linkis.configuration.util; +import org.apache.linkis.server.BDPJettyServerHelper; + +import java.util.HashMap; +import java.util.Map; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; + +import static org.apache.linkis.configuration.conf.AcrossClusterRuleKeys.*; + public class CommonUtils { public static boolean ipCheck(String str) { if (str != null && !str.isEmpty()) { @@ -28,4 +38,32 @@ public static boolean ipCheck(String str) { } return false; } + + public static String ruleMap2String( + String startTime, + String endTime, + String CPUThreshold, + String MemoryThreshold, + String CPUPercentageThreshold, + String MemoryPercentageThreshold) + throws JsonProcessingException { + Map queueRuleMap = new HashMap<>(); + Map timeRuleMap = new HashMap<>(); + Map thresholdRuleMap = new HashMap<>(); + Map ruleMap = new HashMap<>(); + queueRuleMap.put(KEY_QUEUE_SUFFIX, KEY_ACROSS_CLUSTER_QUEUE_SUFFIX); + 
timeRuleMap.put(KEY_START_TIME, startTime); + timeRuleMap.put(KEY_END_TIME, endTime); + thresholdRuleMap.put(KEY_CPU_THRESHOLD, CPUThreshold); + thresholdRuleMap.put(KEY_MEMORY_THRESHOLD, MemoryThreshold); + thresholdRuleMap.put(KEY_CPU_PERCENTAGE_THRESHOLD, CPUPercentageThreshold); + thresholdRuleMap.put(KEY_MEMORY_PERCENTAGE_THRESHOLD, MemoryPercentageThreshold); + ruleMap.put(KEY_QUEUE_RULE, queueRuleMap); + ruleMap.put(KEY_TIME_RULE, timeRuleMap); + ruleMap.put(KEY_THRESHOLD_RULE, thresholdRuleMap); + ObjectMapper map2Json = BDPJettyServerHelper.jacksonJson(); + String rules = map2Json.writeValueAsString(ruleMap); + + return rules; + } } diff --git a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/LabelMapper.xml b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/LabelMapper.xml index cc92785262..78035f139d 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/LabelMapper.xml +++ b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/common/LabelMapper.xml @@ -47,7 +47,17 @@ + + @@ -57,6 +67,17 @@ #{labelKey}, #{stringValue}, #{feature}, #{labelValueSize}, now(), now()) + + + INSERT IGNORE INTO linkis_cg_manager_label( + label_key, label_value,label_feature, label_value_size, update_time, create_time) + VALUES + + #{item.labelKey}, #{item.stringValue}, #{item.feature}, #{item.labelValueSize}, now(), now() + + + + DELETE FROM linkis_cg_manager_label WHERE id IN diff --git a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/AcrossClusterRuleMapper.xml b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/AcrossClusterRuleMapper.xml new file mode 100644 index 0000000000..2d6c1898a9 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/AcrossClusterRuleMapper.xml @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + id,cluster_name,creator,username,create_time,create_by,update_time,update_by,rules,is_valid + + + + cluster_name,creator,username,create_time,create_by,update_time,update_by,rules,is_valid + + + + + + + + + INSERT INTO + linkis_ps_configutation_lm_across_cluster_rule () + VALUES + (#{acrossClusterRule.clusterName},#{acrossClusterRule.creator}, #{acrossClusterRule.username}, + + + #{acrossClusterRule.createTime} + + + now() + + , + #{acrossClusterRule.createBy}, + + + #{acrossClusterRule.updateTime} + + + now() + + , + #{acrossClusterRule.updateBy},#{acrossClusterRule.rules}, #{acrossClusterRule.isValid}) + + + + DELETE + FROM + `linkis_ps_configutation_lm_across_cluster_rule` + WHERE + creator = #{creator} AND username = #{username} + + + + UPDATE + `linkis_ps_configutation_lm_across_cluster_rule` + SET + cluster_name = #{acrossClusterRule.clusterName}, creator = #{acrossClusterRule.creator}, + username=#{acrossClusterRule.username}, create_time=#{acrossClusterRule.createTime}, + create_By=#{acrossClusterRule.createBy}, + + + update_time=#{acrossClusterRule.updateTime} + + + update_time = now() + + , + update_By=#{acrossClusterRule.updateBy}, rules=#{acrossClusterRule.rules}, + is_valid=#{acrossClusterRule.isValid} + WHERE + id = #{acrossClusterRule.id} + + + + UPDATE + `linkis_ps_configutation_lm_across_cluster_rule` + SET + is_valid = #{isValid} + WHERE + id = #{id} + + + + \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/ConfigKeyLimitForUserMapper.xml 
b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/ConfigKeyLimitForUserMapper.xml new file mode 100644 index 0000000000..74d1749105 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/ConfigKeyLimitForUserMapper.xml @@ -0,0 +1,145 @@ + + + + + + + + + + + + + + + + + + + + + id, user_name, combined_label_value, key_id, config_value, max_value, min_value, latest_update_template_uuid, is_valid, + create_by, create_time, update_by, update_time + + + + insert into linkis_ps_configuration_key_limit_for_user ( + id, user_name, combined_label_value, + key_id, config_value, max_value, min_value, + latest_update_template_uuid, + is_valid, create_by, create_time, update_by, + update_time) + values + + ( + #{item.id,jdbcType=BIGINT}, #{item.userName,jdbcType=VARCHAR}, #{item.combinedLabelValue,jdbcType=VARCHAR}, + #{item.keyId,jdbcType=BIGINT}, #{item.configValue,jdbcType=VARCHAR}, #{item.maxValue,jdbcType=VARCHAR}, + #{item.minValue,jdbcType=VARCHAR}, + #{item.latestUpdateTemplateUuid,jdbcType=VARCHAR}, + #{item.isValid,jdbcType=VARCHAR}, #{item.createBy,jdbcType=VARCHAR}, now(), #{item.updateBy,jdbcType=VARCHAR}, + now()) + + + + + update linkis_ps_configuration_key_limit_for_user + set user_name = #{userName,jdbcType=VARCHAR}, + combined_label_value = #{combinedLabelValue,jdbcType=VARCHAR}, + key_id = #{keyId,jdbcType=BIGINT}, + config_value = #{configValue,jdbcType=VARCHAR}, + max_value = #{maxValue,jdbcType=VARCHAR}, + min_value = #{minValue,jdbcType=VARCHAR}, + latest_update_template_uuid = #{latestUpdateTemplateUuid,jdbcType=VARCHAR}, + is_valid = #{isValid,jdbcType=VARCHAR}, + create_by = #{createBy,jdbcType=VARCHAR}, + update_by = #{updateBy,jdbcType=VARCHAR}, + update_time = now() + where id = #{id,jdbcType=BIGINT} + + + + + + insert into linkis_ps_configuration_key_limit_for_user (user_name, combined_label_value, + key_id, config_value, max_value, min_value, + latest_update_template_uuid, + create_by, create_time, update_by, + update_time) + values + + ( + #{item.userName,jdbcType=VARCHAR}, #{item.combinedLabelValue,jdbcType=VARCHAR}, + #{item.keyId,jdbcType=BIGINT}, #{item.configValue,jdbcType=VARCHAR}, #{item.maxValue,jdbcType=VARCHAR}, + #{item.minValue,jdbcType=VARCHAR}, #{item.latestUpdateTemplateUuid,jdbcType=VARCHAR}, + #{item.createBy,jdbcType=VARCHAR}, + + + #{item.createTime}, + + + now(), + + + #{item.updateBy,jdbcType=VARCHAR}, + + + #{item.updateTime} + + + now() + + + ) + + on duplicate key update + config_value =values(config_value), + max_value =values(max_value), + latest_update_template_uuid =values(latest_update_template_uuid), + update_by =values(update_by), + update_time= now() + + + + + + + + + diff --git a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/ConfigMapper.xml b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/ConfigMapper.xml index d282d1cb27..1fb4d179e6 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/ConfigMapper.xml +++ b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/ConfigMapper.xml @@ -24,7 +24,6 @@ - @@ -32,8 +31,7 @@ - - + @@ -51,8 +49,9 @@ - - + + + @@ -78,9 +77,20 @@ + + + + + + + + + + + - `id`, `key`, `description`, `name`, `engine_conn_type`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName` + `id`, `key`, `description`, `name`, `engine_conn_type`, `default_value`, 
`validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`,`boundary_type` @@ -93,7 +103,7 @@ - k.id, k.key,COALESCE(k.en_description, k.description) as description, + k.id, k.`key`,COALESCE(k.en_description, k.description) as description, COALESCE(k.en_name, k.name) as name, k.engine_conn_type, k.default_value, k.validate_type, k.validate_range, k.is_hidden, k.is_advanced, k.level,COALESCE(k.en_treeName, k.treeName) as treeName, @@ -113,9 +123,19 @@ INSERT INTO linkis_ps_configuration_config_key ( - id, `key`, description, name, engine_conn_type, default_value, validate_type, validate_range, is_hidden, is_advanced, level, treeName) + `id`, `key`, `description`, + `name`, `engine_conn_type`, `default_value`, + `validate_type`, `validate_range`, `is_hidden`, + `is_advanced`, `level`, `treeName`, + `boundary_type`, `en_name`, `en_treeName`, + `en_description`) VALUES ( - #{id}, #{key}, #{description}, #{name}, #{engineType}, #{defaultValue}, #{validateType}, #{validateRange}, #{isHidden}, #{isAdvanced}, #{level}, #{treeName}) + #{id}, #{key}, #{description}, + #{name}, #{engineType}, #{defaultValue}, + #{validateType}, #{validateRange}, #{isHidden}, + #{isAdvanced}, #{level}, #{treeName}, + #{boundaryType}, #{enName}, #{enTreeName}, + #{enDescription}) + + + + + + + + + + + + + + SELECT * FROM linkis_ps_configuration_config_value WHERE id = #{id} @@ -257,4 +365,121 @@ WHERE id = #{categoryId} + + DELETE FROM linkis_ps_configuration_config_key + WHERE id = #{id} + + + + + + + + + + + + INSERT INTO linkis_ps_configuration_config_key ( + `key`, `description`, `name`, + `engine_conn_type`, `default_value`, `validate_type`, + `validate_range`, `is_hidden`, `is_advanced`, + `level`, `treeName`, `boundary_type`, + `en_name`, `en_treeName`, `en_description`, + `template_required` + ) + VALUES ( + #{key}, #{description}, #{name}, + #{engineType}, #{defaultValue}, #{validateType}, + #{validateRange}, #{isHidden}, #{isAdvanced}, + #{level}, #{treeName}, #{boundaryType}, + #{enName}, #{enTreeName}, #{enDescription}, + #{templateRequired} + ) + + + + UPDATE linkis_ps_configuration_config_key + + `key` = #{key}, + `name` = #{name}, + `description` = #{description}, + `engine_conn_type` = #{engineType}, + `default_value` = #{defaultValue}, + `validate_type` = #{validateType}, + `validate_range` = #{validateRange}, + `validate_range` = #{validateRange}, + `treeName` = #{treeName}, + `boundary_type` = #{boundaryType}, + `en_name` = #{enName}, + `en_treeName` = #{enTreeName}, + `en_description` = #{enDescription}, + `template_required` = #{templateRequired}, + + WHERE id = #{id} + + + + \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/TemplateConfigKeyMapper.xml b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/TemplateConfigKeyMapper.xml new file mode 100644 index 0000000000..50a11607c6 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/mysql/TemplateConfigKeyMapper.xml @@ -0,0 +1,161 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id, template_name, template_uuid, key_id, config_value, max_value, min_value, validate_range, + is_valid, create_by, create_time, update_by, update_time + + + + insert into linkis_ps_configuration_template_config_key (id, template_name, template_uuid, + key_id, config_value, max_value, + min_value, validate_range, is_valid, + create_by, create_time, update_by, update_time + ) + values + + ( + 
#{item.id,jdbcType=BIGINT}, #{item.templateName,jdbcType=VARCHAR}, #{item.templateUuid,jdbcType=VARCHAR}, + #{item.keyId,jdbcType=BIGINT}, #{item.configValue,jdbcType=VARCHAR}, #{item.maxValue,jdbcType=VARCHAR}, + #{item.minValue,jdbcType=VARCHAR}, #{item.validateRange,jdbcType=VARCHAR}, #{item.isValid,jdbcType=VARCHAR}, + #{item.createBy,jdbcType=VARCHAR}, now(), #{item.updateBy,jdbcType=VARCHAR}, now() + ) + + + + + + + delete + from linkis_ps_configuration_template_config_key + where template_uuid = #{templateUuid,jdbcType=VARCHAR} + and key_id in + + #{item} + + + + + insert into linkis_ps_configuration_template_config_key (template_name, template_uuid, + key_id, config_value, max_value, + create_by, create_time, update_by, update_time + ) + values + + ( + #{item.templateName,jdbcType=VARCHAR}, #{item.templateUuid,jdbcType=VARCHAR}, + #{item.keyId,jdbcType=BIGINT}, #{item.configValue,jdbcType=VARCHAR}, #{item.maxValue,jdbcType=VARCHAR}, + #{item.createBy,jdbcType=VARCHAR}, + + + #{item.createTime}, + + + now(), + + + #{item.updateBy,jdbcType=VARCHAR}, + + + #{item.updateTime} + + + now() + + + ) + + on duplicate key update + template_name =values(template_name), + config_value =values(config_value), + max_value =values(max_value), + update_by=values(update_by), + update_time= now() + + + + + + + + + + + + + + + diff --git a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/AcrossClusterRuleMapper.xml b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/AcrossClusterRuleMapper.xml new file mode 100644 index 0000000000..2245dbae35 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/AcrossClusterRuleMapper.xml @@ -0,0 +1,130 @@ + + + + + + + + + + + + + + + + + + + + + + id,cluster_name,creator,username,create_time,create_by,update_time,update_by,rules,is_valid + + + + cluster_name,creator,username,create_time,create_by,update_time,update_by,rules,is_valid + + + + + + + + + INSERT INTO + linkis_ps_configutation_lm_across_cluster_rule () + VALUES + (#{acrossClusterRule.clusterName},#{acrossClusterRule.creator}, #{acrossClusterRule.username}, + + + #{acrossClusterRule.createTime} + + + now() + + , + #{acrossClusterRule.createBy}, + + + #{acrossClusterRule.updateTime} + + + now() + + , + #{acrossClusterRule.updateBy},#{acrossClusterRule.rules}, #{acrossClusterRule.isValid}) + + + + DELETE + FROM + "linkis_ps_configutation_lm_across_cluster_rule" + WHERE + creator = #{creator} AND username = #{username} + + + + UPDATE + "linkis_ps_configutation_lm_across_cluster_rule" + SET + cluster_name = #{acrossClusterRule.clusterName}, creator = #{acrossClusterRule.creator}, + username=#{acrossClusterRule.username}, create_time=#{acrossClusterRule.createTime}, + create_By=#{acrossClusterRule.createBy}, + + + update_time=#{acrossClusterRule.updateTime} + + + update_time = now() + + , + update_By=#{acrossClusterRule.updateBy}, rules=#{acrossClusterRule.rules}, + is_valid=#{acrossClusterRule.isValid} + WHERE + id = #{acrossClusterRule.id} + + + + UPDATE + "linkis_ps_configutation_lm_across_cluster_rule" + SET + is_valid = #{isValid} + WHERE + id = #{id} + + + + \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/ConfigKeyLimitForUserMapper.xml b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/ConfigKeyLimitForUserMapper.xml new file mode 100644 index 0000000000..74d1749105 --- 
/dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/ConfigKeyLimitForUserMapper.xml @@ -0,0 +1,145 @@ + + + + + + + + + + + + + + + + + + + + + id, user_name, combined_label_value, key_id, config_value, max_value, min_value, latest_update_template_uuid, is_valid, + create_by, create_time, update_by, update_time + + + + insert into linkis_ps_configuration_key_limit_for_user ( + id, user_name, combined_label_value, + key_id, config_value, max_value, min_value, + latest_update_template_uuid, + is_valid, create_by, create_time, update_by, + update_time) + values + + ( + #{item.id,jdbcType=BIGINT}, #{item.userName,jdbcType=VARCHAR}, #{item.combinedLabelValue,jdbcType=VARCHAR}, + #{item.keyId,jdbcType=BIGINT}, #{item.configValue,jdbcType=VARCHAR}, #{item.maxValue,jdbcType=VARCHAR}, + #{item.minValue,jdbcType=VARCHAR}, + #{item.latestUpdateTemplateUuid,jdbcType=VARCHAR}, + #{item.isValid,jdbcType=VARCHAR}, #{item.createBy,jdbcType=VARCHAR}, now(), #{item.updateBy,jdbcType=VARCHAR}, + now()) + + + + + update linkis_ps_configuration_key_limit_for_user + set user_name = #{userName,jdbcType=VARCHAR}, + combined_label_value = #{combinedLabelValue,jdbcType=VARCHAR}, + key_id = #{keyId,jdbcType=BIGINT}, + config_value = #{configValue,jdbcType=VARCHAR}, + max_value = #{maxValue,jdbcType=VARCHAR}, + min_value = #{minValue,jdbcType=VARCHAR}, + latest_update_template_uuid = #{latestUpdateTemplateUuid,jdbcType=VARCHAR}, + is_valid = #{isValid,jdbcType=VARCHAR}, + create_by = #{createBy,jdbcType=VARCHAR}, + update_by = #{updateBy,jdbcType=VARCHAR}, + update_time = now() + where id = #{id,jdbcType=BIGINT} + + + + + + insert into linkis_ps_configuration_key_limit_for_user (user_name, combined_label_value, + key_id, config_value, max_value, min_value, + latest_update_template_uuid, + create_by, create_time, update_by, + update_time) + values + + ( + #{item.userName,jdbcType=VARCHAR}, #{item.combinedLabelValue,jdbcType=VARCHAR}, + #{item.keyId,jdbcType=BIGINT}, #{item.configValue,jdbcType=VARCHAR}, #{item.maxValue,jdbcType=VARCHAR}, + #{item.minValue,jdbcType=VARCHAR}, #{item.latestUpdateTemplateUuid,jdbcType=VARCHAR}, + #{item.createBy,jdbcType=VARCHAR}, + + + #{item.createTime}, + + + now(), + + + #{item.updateBy,jdbcType=VARCHAR}, + + + #{item.updateTime} + + + now() + + + ) + + on duplicate key update + config_value =values(config_value), + max_value =values(max_value), + latest_update_template_uuid =values(latest_update_template_uuid), + update_by =values(update_by), + update_time= now() + + + + + + + + + diff --git a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/ConfigMapper.xml b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/ConfigMapper.xml index 45b51f969a..1a84794a05 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/ConfigMapper.xml +++ b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/ConfigMapper.xml @@ -1,4 +1,4 @@ - + + + + + + + + + + + + + + + + + + + + + + + + + + + INSERT INTO linkis_ps_configuration_config_value( config_key_id, config_value, config_label_id, create_time, update_time) @@ -194,16 +273,40 @@ - - INSERT INTO linkis_ps_configuration_config_value( - config_key_id, config_value, config_label_id, create_time, update_time) + + REPLACE INTO linkis_ps_configuration_config_value( + id, config_key_id, config_value, config_label_id, create_time, update_time) VALUES ( - 
#{configKeyId},#{configValue}, #{configLabelId}, now(), now()) - on conflict(config_key_id, config_label_id) - do update set - config_value=EXCLUDED.config_value, - update_time=EXCLUDED.update_time, - create_time=EXCLUDED.create_time + #{id},#{configKeyId},#{configValue}, #{configLabelId}, now(), now()) + + + + INSERT INTO linkis_ps_configuration_config_value(config_key_id, config_value, config_label_id, create_time, update_time) + VALUES + + ( + #{item.configKeyId},#{item.configValue}, #{item.configLabelId}, + + + #{item.createTime}, + + + now(), + + + + + #{item.updateTime} + + + now() + + + ) + + on duplicate key update + config_value =values(config_value), + update_time= now() + SELECT + "id", "key", "description", "name" , + "engine_conn_type", "default_value", "validate_type", "validate_range", + "is_hidden", "is_advanced", "level", "treeName", "boundary_type", "template_required" + FROM linkis_ps_configuration_config_key + + "engine_conn_type" = #{engineType} + and "key" like concat('%',#{key},'%') + + ORDER BY engine_conn_type + + + + + + + + + + INSERT INTO linkis_ps_configuration_config_key ( + "key", "description", "name", + "engine_conn_type", "default_value", "validate_type", + "validate_range", "is_hidden", "is_advanced", + "level", "treeName", "boundary_type", + "en_name", "en_treeName", "en_description", + "template_required" + ) + VALUES ( + #{key}, #{description}, #{name}, + #{engineType}, #{defaultValue}, #{validateType}, + #{validateRange}, #{isHidden}, #{isAdvanced}, + #{level}, #{treeName}, #{boundaryType}, + #{enName}, #{enTreeName}, #{enDescription}, + #{templateRequired} + ) + + + + UPDATE linkis_ps_configuration_config_key + + "key" = #{key}, + "name" = #{name}, + "description" = #{description}, + "engine_conn_type" = #{engineType}, + "default_value" = #{defaultValue}, + "validate_type" = #{validateType}, + "validate_range" = #{validateRange}, + "validate_range" = #{validateRange}, + "treeName" = #{treeName}, + "boundary_type" = #{boundaryType}, + "en_name" = #{enName}, + "en_treeName" = #{enTreeName}, + "en_description" = #{enDescription}, + "template_required" = #{templateRequired}, + + WHERE id = #{id} + + + + \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/TemplateConfigKeyMapper.xml b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/TemplateConfigKeyMapper.xml new file mode 100644 index 0000000000..16fd6c014d --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/main/resources/mapper/postgresql/TemplateConfigKeyMapper.xml @@ -0,0 +1,161 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + id, template_name, template_uuid, key_id, config_value, max_value, min_value, validate_range, + is_valid, create_by, create_time, update_by, update_time + + + + insert into linkis_ps_configuration_template_config_key (id, template_name, template_uuid, + key_id, config_value, max_value, + min_value, validate_range, is_valid, + create_by, create_time, update_by, update_time + ) + values + + ( + #{item.id,jdbcType=BIGINT}, #{item.templateName,jdbcType=VARCHAR}, #{item.templateUuid,jdbcType=VARCHAR}, + #{item.keyId,jdbcType=BIGINT}, #{item.configValue,jdbcType=VARCHAR}, #{item.maxValue,jdbcType=VARCHAR}, + #{item.minValue,jdbcType=VARCHAR}, #{item.validateRange,jdbcType=VARCHAR}, #{item.isValid,jdbcType=VARCHAR}, + #{item.createBy,jdbcType=VARCHAR}, now(), #{item.updateBy,jdbcType=VARCHAR}, now() + ) + + + + + + + delete + from 
linkis_ps_configuration_template_config_key + where template_uuid = #{templateUuid,jdbcType=VARCHAR} + and key_id in + + #{item} + + + + + insert into linkis_ps_configuration_template_config_key (template_name, template_uuid, + key_id, config_value, max_value, + create_by, create_time, update_by, update_time + ) + values + + ( + #{item.templateName,jdbcType=VARCHAR}, #{item.templateUuid,jdbcType=VARCHAR}, + #{item.keyId,jdbcType=BIGINT}, #{item.configValue,jdbcType=VARCHAR}, #{item.maxValue,jdbcType=VARCHAR}, + #{item.createBy,jdbcType=VARCHAR}, + + + #{item.createTime}, + + + now(), + + + #{item.updateBy,jdbcType=VARCHAR}, + + + #{item.updateTime} + + + now() + + + ) + + on duplicate key update + template_name =values(template_name), + config_value =values(config_value), + max_value =values(max_value), + update_by=values(update_by), + update_time= now() + + + + + + + + + + + + + + + diff --git a/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/service/ConfigurationService.scala b/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/service/ConfigurationService.scala index 1a0f714522..3f86697254 100644 --- a/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/service/ConfigurationService.scala +++ b/linkis-public-enhancements/linkis-configuration/src/main/scala/org/apache/linkis/configuration/service/ConfigurationService.scala @@ -17,9 +17,9 @@ package org.apache.linkis.configuration.service -import org.apache.linkis.common.utils.Logging +import org.apache.linkis.common.utils.{Logging, Utils} import org.apache.linkis.configuration.conf.Configuration -import org.apache.linkis.configuration.dao.{ConfigMapper, LabelMapper} +import org.apache.linkis.configuration.dao.{ConfigKeyLimitForUserMapper, ConfigMapper, LabelMapper} import org.apache.linkis.configuration.entity._ import org.apache.linkis.configuration.exception.ConfigurationException import org.apache.linkis.configuration.util.{LabelEntityParser, LabelParameterParser} @@ -42,6 +42,7 @@ import org.springframework.stereotype.Service import org.springframework.transaction.annotation.Transactional import org.springframework.util.CollectionUtils +import java.text.MessageFormat import java.util import scala.collection.JavaConverters._ @@ -57,6 +58,8 @@ class ConfigurationService extends Logging { @Autowired private var validatorManager: ValidatorManager = _ + @Autowired private var configKeyLimitForUserMapper: ConfigKeyLimitForUserMapper = _ + private val combinedLabelBuilder: CombinedLabelBuilder = new CombinedLabelBuilder @Transactional @@ -93,12 +96,6 @@ class ConfigurationService extends Logging { } } - def insertCreator(creator: String): Unit = { - val creatorID: Long = configMapper.selectAppIDByAppName(creator) - if (creatorID > 0) configMapper.insertCreator(creator) - else logger.warn(s"creator${creator} exists") - } - def checkAndCreateUserLabel( settings: util.List[ConfigKeyValue], username: String, @@ -178,6 +175,33 @@ class ConfigurationService extends Logging { createList: util.List[ConfigValue], updateList: util.List[ConfigValue] ): Any = { + + val configLabel = labelMapper.getLabelById(setting.getConfigLabelId) + val combinedLabel = combinedLabelBuilder + .buildFromStringValue(configLabel.getLabelKey, configLabel.getStringValue) + .asInstanceOf[CombinedLabel] + val templateConfigKeyVo = + configKeyLimitForUserMapper.selectByLabelAndKeyId(combinedLabel.getStringValue, setting.getId) + if 
(templateConfigKeyVo != null && StringUtils.isNotBlank(templateConfigKeyVo.getMaxValue)) { + Utils.tryCatch { + val maxValue = Integer.valueOf(templateConfigKeyVo.getMaxValue.replaceAll("[^0-9]", "")) + val configValue = Integer.valueOf(setting.getConfigValue.replaceAll("[^0-9]", "")) + if (configValue > maxValue) { + throw new ConfigurationException( + s"Parameter key:${setting.getKey}, config value:${setting.getConfigValue} verification failed, exceeds the specified max value:${templateConfigKeyVo.getMaxValue} (参数校验失败,超过指定的最大值):" + + s"${setting.getValidateType}--${setting.getValidateRange}" + ) + } + } { case exception: Exception => + if (exception.isInstanceOf[ConfigurationException]) { + throw exception + } else { + logger.warn( + s"Failed to check special limit setting for key:${setting.getKey}, config value:${setting.getConfigValue}" + ) + } + } + } paramCheck(setting) if (setting.getIsUserDefined) { val configValue = new ConfigValue @@ -259,6 +283,12 @@ class ConfigurationService extends Logging { combinedLabel.asInstanceOf[CombinedLabelImpl] } + /** + * Priority: configs > defaultConfigs + * @param configs + * @param defaultConfigs + * @return + */ def buildTreeResult( configs: util.List[ConfigKeyValue], defaultConfigs: util.List[ConfigKeyValue] = new util.ArrayList[ConfigKeyValue]() @@ -269,9 +299,8 @@ class ConfigurationService extends Logging { defaultConfig.setIsUserDefined(false) configs.asScala.foreach(config => { if (config.getKey != null && config.getKey.equals(defaultConfig.getKey)) { - if (StringUtils.isNotBlank(config.getConfigValue)) { - defaultConfig.setConfigValue(config.getConfigValue) - } + // configValue also needs to be replaced when the value is empty + defaultConfig.setConfigValue(config.getConfigValue) defaultConfig.setConfigLabelId(config.getConfigLabelId) defaultConfig.setValueId(config.getValueId) defaultConfig.setIsUserDefined(true) @@ -380,6 +409,35 @@ class ConfigurationService extends Logging { replaceCreatorToEngine(defaultCreatorConfigs, defaultEngineConfigs) } } + + // add special config limit info + if (defaultEngineConfigs.size() > 0) { + val keyIdList = defaultEngineConfigs.asScala.toStream + .map(e => { + e.getId + }) + .toList + .asJava + val limitList = + configKeyLimitForUserMapper.selectByLabelAndKeyIds(combinedLabel.getStringValue, keyIdList) + defaultEngineConfigs.asScala.foreach(entity => { + val keyId = entity.getId + val res = limitList.asScala.filter(v => v.getKeyId == keyId).toList.asJava + if (res.size() > 0) { + val specialMap = new util.HashMap[String, String]() + val maxValue = res.get(0).getMaxValue + if (StringUtils.isNotBlank(maxValue)) { + specialMap.put("maxValue", maxValue) + entity.setSpecialLimit(specialMap) + } + } + }) + } else { + logger.warn( + s"The configuration is empty.
diff --git a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigKeyLimitForUserMapperTest.java b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigKeyLimitForUserMapperTest.java new file mode 100644 index 0000000000..a7ef5a1c14 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigKeyLimitForUserMapperTest.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.configuration.dao; + +import org.apache.linkis.configuration.entity.ConfigKeyLimitForUser; + +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.List; +import java.util.UUID; + +import org.instancio.Instancio; +import org.instancio.Select; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +class ConfigKeyLimitForUserMapperTest extends BaseDaoTest { + + @Autowired ConfigKeyLimitForUserMapper configKeyLimitForUserMapper; + + String uuid = UUID.randomUUID().toString(); + String name = "for-test"; + + private List<ConfigKeyLimitForUser> initData() { + List<ConfigKeyLimitForUser> list = + Instancio.ofList(ConfigKeyLimitForUser.class) + .generate(Select.field(ConfigKeyLimitForUser::getIsValid), gen -> gen.oneOf("Y", "N")) + .create(); + ConfigKeyLimitForUser configKeyLimitForUser = new ConfigKeyLimitForUser(); + configKeyLimitForUser.setUserName("testuser"); + configKeyLimitForUser.setCombinedLabelValue("IDE-hadoop,spark-2.3.3"); + configKeyLimitForUser.setKeyId(1L); + configKeyLimitForUser.setLatestUpdateTemplateUuid(uuid); + configKeyLimitForUser.setCreateBy("test"); + configKeyLimitForUser.setUpdateBy("test"); + list.add(configKeyLimitForUser); + configKeyLimitForUserMapper.batchInsertList(list); + return list; + } + + @Test + void batchInsertOrUpdateListTest() { + List<ConfigKeyLimitForUser> list = initData(); + list.get(1).setLatestUpdateTemplateUuid("123456"); + int isOk = configKeyLimitForUserMapper.batchInsertOrUpdateList(list); + Assertions.assertTrue(isOk > 1); + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigMapperTest.java b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigMapperTest.java index 619bd2be2c..a2bea5fd68 100644 --- a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigMapperTest.java +++ b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/ConfigMapperTest.java @@
-17,23 +17,6 @@ package org.apache.linkis.configuration.dao; -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - import org.apache.linkis.configuration.entity.*; import org.springframework.beans.factory.annotation.Autowired; @@ -88,11 +71,6 @@ void testGetConfigKeyValueByLabelId() { assertEquals(7, configKeyValueList.size()); } - /** - * When using the h2 library for testing,if the function(on conflict) is not supported,an error - * will be reported, and the pg physical library will not guarantee an error pg使用h2库测试时不支持函数(on - * conflict)会报错,pg实体库不会报错 - */ @Test void testInsertValue() { ConfigValue result = insertConfigValue(); @@ -159,12 +137,6 @@ void testListKeyByStringValue() { // assertEquals(7, configKeyList.size()); } - @Test - void testInsertCreator() { - // mapper方法没有对应的实现类 - // configMapper.insertCreator("tom"); - } - @Test void testGetCategory() { List categoryLabelList = configMapper.getCategory(); @@ -208,6 +180,7 @@ void testUpdateCategory() { void testInsertKey() { ConfigKey configKey = new ConfigKey(); configKey.setKey("wds.linkis.rm.instance.max.max"); + configKey.setBoundaryType(3); configMapper.insertKey(configKey); ConfigKey result = configMapper.selectKeyByKeyID(8L); // assertEquals("wds.linkis.rm.instance.max.max", result.getKey()); diff --git a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/LabelMapperTest.java b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/LabelMapperTest.java index 0046246818..4b7e69784e 100644 --- a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/LabelMapperTest.java +++ b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/LabelMapperTest.java @@ -17,23 +17,6 @@ package org.apache.linkis.configuration.dao; -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - import org.apache.linkis.configuration.entity.ConfigLabel; import org.springframework.beans.factory.annotation.Autowired; diff --git a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/TemplateConfigKeyMapperTest.java b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/TemplateConfigKeyMapperTest.java new file mode 100644 index 0000000000..64b12ba7f4 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/TemplateConfigKeyMapperTest.java @@ -0,0 +1,96 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.configuration.dao; + +import org.apache.linkis.configuration.entity.TemplateConfigKey; + +import org.springframework.beans.factory.annotation.Autowired; + +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; + +import org.instancio.Instancio; +import org.instancio.Select; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +class TemplateConfigKeyMapperTest extends BaseDaoTest { + + @Autowired TemplateConfigKeyMapper templateConfigKeyMapper; + String uuid = UUID.randomUUID().toString(); + String name = "for-test"; + + private List initData() { + List list = + Instancio.ofList(TemplateConfigKey.class) + .size(9) + .generate(Select.field(TemplateConfigKey::getIsValid), gen -> gen.oneOf("Y", "N")) + .create(); + + TemplateConfigKey templateConfigKey = new TemplateConfigKey(); + templateConfigKey.setTemplateName(name); + templateConfigKey.setTemplateUuid(uuid); + templateConfigKey.setKeyId(1L); + templateConfigKey.setConfigValue("3"); + templateConfigKey.setMaxValue("8"); + templateConfigKey.setCreateBy("test"); + templateConfigKey.setUpdateBy("test"); + list.add(templateConfigKey); + templateConfigKeyMapper.batchInsertList(list); + return list; + } + + @Test + void selectListByTemplateUuid() { + initData(); + List res = templateConfigKeyMapper.selectListByTemplateUuid(uuid); + assertEquals(res.size(), 1); + assertEquals(res.get(0).getTemplateName(), name); + } + + @Test + void deleteByTemplateUuidAndKeyIdList() { + List list = initData(); + List KeyIdList = new ArrayList<>(); + KeyIdList.add(1L); + int num = templateConfigKeyMapper.deleteByTemplateUuidAndKeyIdList(uuid, KeyIdList); + assertEquals(num, 1); + } + + @Test + void batchInsertOrUpdateList() { + List list = initData(); + list.get(1).setConfigValue("20"); + int isOK = templateConfigKeyMapper.batchInsertOrUpdateList(list); + Assertions.assertTrue(isOK >= 1); + } + + @Test + void selectListByTemplateUuidList() { + List list = initData(); + List templateUuidList = new ArrayList<>(); + 
templateUuidList.add(uuid); + templateUuidList.add("123456"); + List res = + templateConfigKeyMapper.selectListByTemplateUuidList(templateUuidList); + Assertions.assertTrue(res.size() == 1); + } +} diff --git a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/UserIpMapperTest.java b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/UserIpMapperTest.java deleted file mode 100644 index ef466be542..0000000000 --- a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/UserIpMapperTest.java +++ /dev/null @@ -1,91 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.configuration.dao; - -import org.apache.linkis.configuration.entity.UserIpVo; - -import org.springframework.beans.factory.annotation.Autowired; - -import java.util.Date; -import java.util.List; - -import org.junit.jupiter.api.Test; - -import static org.junit.jupiter.api.Assertions.assertTrue; - -class UserIpMapperTest extends BaseDaoTest { - - @Autowired UserIpMapper userIpMapper; - - UserIpVo insert() { - UserIpVo userIpVo = new UserIpVo(); - userIpVo.setUser("user"); - userIpVo.setBussinessUser("bussinessUser"); - userIpVo.setCreator("creator"); - userIpVo.setCreateTime(new Date()); - userIpVo.setUpdateTime(new Date()); - userIpVo.setDesc("desc"); - userIpVo.setIpList("ips"); - userIpMapper.createUserIP(userIpVo); - return userIpVo; - } - - @Test - void createUserIP() { - insert(); - UserIpVo userIpVo = userIpMapper.queryUserIP("user", "creator"); - assertTrue(userIpVo != null); - } - - @Test - void deleteUserIP() { - insert(); - UserIpVo userIpVo = userIpMapper.queryUserIP("user", "creator"); - userIpMapper.deleteUserIP(Integer.valueOf(userIpVo.getId())); - UserIpVo list = userIpMapper.queryUserIP("user", "creator"); - assertTrue(list == null); - } - - @Test - void updateUserIP() { - insert(); - UserIpVo userIpVo = userIpMapper.queryUserIP("user", "creator"); - UserIpVo updateUserIpVo = new UserIpVo(); - updateUserIpVo.setId(userIpVo.getId()); - updateUserIpVo.setDesc("desc2"); - updateUserIpVo.setBussinessUser("bussinessUser2"); - userIpMapper.updateUserIP(updateUserIpVo); - UserIpVo userIpVo1 = userIpMapper.queryUserIP("user", "creator"); - assertTrue(userIpVo1.getDesc().equals("desc2")); - assertTrue(userIpVo1.getBussinessUser().equals("bussinessUser2")); - } - - @Test - void queryUserIP() { - insert(); - UserIpVo userIpVo = userIpMapper.queryUserIP("user", "creator"); - assertTrue(userIpVo != null); - } - - @Test - void queryUserIPList() { - insert(); - List userIpVos = userIpMapper.queryUserIPList("user", "creator"); - assertTrue(userIpVos.size() > 0); - } -} diff --git 
a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/UserTenantMapperTest.java b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/UserTenantMapperTest.java deleted file mode 100644 index 788409f2ed..0000000000 --- a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/dao/UserTenantMapperTest.java +++ /dev/null @@ -1,84 +0,0 @@ -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.linkis.configuration.dao; - -import org.apache.linkis.configuration.entity.TenantVo; - -import org.springframework.beans.factory.annotation.Autowired; - -import java.util.Date; -import java.util.List; - -import org.junit.jupiter.api.Test; - -import static org.junit.jupiter.api.Assertions.assertTrue; - -class UserTenantMapperTest extends BaseDaoTest { - - @Autowired UserTenantMapper userTenantMapper; - - TenantVo insert() { - TenantVo tenantVo = new TenantVo(); - tenantVo.setUser("user"); - tenantVo.setCreateTime(new Date()); - tenantVo.setCreator("creator"); - tenantVo.setTenantValue("tenantValue"); - tenantVo.setUpdateTime(new Date()); - tenantVo.setBussinessUser("bussinessUser"); - tenantVo.setDesc("desc"); - userTenantMapper.createTenant(tenantVo); - return tenantVo; - } - - @Test - void createTenant() { - insert(); - List tenantVos = userTenantMapper.queryTenantList("user", "creator", "tenantValue"); - assertTrue(tenantVos.size() > 0); - } - - @Test - void deleteTenant() { - insert(); - TenantVo tenantVo = userTenantMapper.queryTenant("user", "creator"); - userTenantMapper.deleteTenant(Integer.valueOf(tenantVo.getId())); - List tenantVos = userTenantMapper.queryTenantList("user", "creator", "tenantValue"); - assertTrue(tenantVos.size() == 0); - } - - @Test - void updateTenant() { - insert(); - TenantVo tenantVo = userTenantMapper.queryTenant("user", "creator"); - TenantVo updateTenantVo = new TenantVo(); - updateTenantVo.setId(tenantVo.getId()); - updateTenantVo.setDesc("desc2"); - updateTenantVo.setBussinessUser("bussinessUser2"); - userTenantMapper.updateTenant(updateTenantVo); - TenantVo queryTenant = userTenantMapper.queryTenant("user", "creator"); - assertTrue(queryTenant.getDesc().equals("desc2")); - assertTrue(queryTenant.getBussinessUser().equals("bussinessUser2")); - } - - @Test - void queryTenant() { - insert(); - TenantVo tenantVo = userTenantMapper.queryTenant("user", "creator"); - assertTrue(tenantVo != null); - } -} diff --git a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/exception/ConfigurationExceptionTest.java 
b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/exception/ConfigurationExceptionTest.java index eae854ca13..9bfee23682 100644 --- a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/exception/ConfigurationExceptionTest.java +++ b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/exception/ConfigurationExceptionTest.java @@ -17,21 +17,4 @@ package org.apache.linkis.configuration.exception; -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - public class ConfigurationExceptionTest {} diff --git a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApiTest.java b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApiTest.java index 0974743014..5170824281 100644 --- a/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApiTest.java +++ b/linkis-public-enhancements/linkis-configuration/src/test/java/org/apache/linkis/configuration/restful/api/ConfigurationRestfulApiTest.java @@ -57,7 +57,7 @@ public void TestAddKeyForEngine() throws Exception { paramsMap.add("engineType", "spark"); paramsMap.add("version", "3.2.1"); paramsMap.add("token", "e8724-e"); - paramsMap.add("keyJson", "{'engineType':'spark','version':'3.2.1'}"); + paramsMap.add("keyJson", "{'engineType':'spark','version':'3.2.1','boundaryType':'0'}"); String url = "/configuration/addKeyForEngine"; sendUrl(url, paramsMap, "get", null); } diff --git a/linkis-public-enhancements/linkis-configuration/src/test/resources/application.properties b/linkis-public-enhancements/linkis-configuration/src/test/resources/application.properties index 1b9ac21258..1dd49b9917 100644 --- a/linkis-public-enhancements/linkis-configuration/src/test/resources/application.properties +++ b/linkis-public-enhancements/linkis-configuration/src/test/resources/application.properties @@ -55,6 +55,5 @@ eureka.client.enabled=false eureka.client.serviceUrl.registerWithEureka=false mybatis-plus.mapper-locations=classpath*:mapper/common/*.xml,classpath*:mapper/mysql/*.xml -#mybatis-plus.mapper-locations=classpath*:mapper/common/*.xml,classpath*:mapper/postgresql/*.xml mybatis-plus.type-aliases-package=org.apache.linkis.configuration.entity mybatis-plus.configuration.log-impl=org.apache.ibatis.logging.stdout.StdOutImpl \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-configuration/src/test/resources/create.sql b/linkis-public-enhancements/linkis-configuration/src/test/resources/create.sql index fb22c7a114..4d3c587601 100644 --- 
a/linkis-public-enhancements/linkis-configuration/src/test/resources/create.sql +++ b/linkis-public-enhancements/linkis-configuration/src/test/resources/create.sql @@ -23,7 +23,7 @@ DROP TABLE IF EXISTS `linkis_cg_manager_label`; CREATE TABLE `linkis_cg_manager_label` ( `id` int(20) NOT NULL AUTO_INCREMENT, - `label_key` varchar(50) NOT NULL, + `label_key` varchar(32) NOT NULL, `label_value` varchar(255) NOT NULL, `label_feature` varchar(16) NOT NULL, `label_value_size` int(20) NOT NULL, @@ -48,6 +48,10 @@ CREATE TABLE `linkis_ps_configuration_config_key` `is_advanced` tinyint(1) DEFAULT NULL COMMENT 'Whether it is an advanced parameter. If set to 1(true), parameters would be displayed only when user choose to do so', `level` tinyint(1) DEFAULT NULL COMMENT 'Basis for displaying sorting in the front-end. Higher the level is, higher the rank the parameter gets', `treeName` varchar(20) DEFAULT NULL COMMENT 'Reserved field, representing the subdirectory of engineType', + `boundary_type` int(2) NOT NULL DEFAULT '0' COMMENT '0 none / 1 with min / 2 with max / 3 min and max both', + `en_description` varchar(200) DEFAULT NULL COMMENT 'English description', + `en_name` varchar(100) DEFAULT NULL COMMENT 'English name', + `en_treeName` varchar(100) DEFAULT NULL COMMENT 'English treeName', PRIMARY KEY (`id`) ); @@ -88,65 +92,42 @@ CREATE TABLE `linkis_ps_configuration_category` UNIQUE INDEX (`label_id`) ); -DROP TABLE IF EXISTS `linkis_cg_user_ip_config`; -CREATE TABLE `linkis_cg_user_ip_config` ( - `id` int(20) NOT NULL AUTO_INCREMENT, - `user` varchar(50) NOT NULL, - `creator` varchar(50) NOT NULL, - `ip_list` text NOT NULL, - `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, - `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, - `desc` varchar(100) NOT NULL, - `bussiness_user` varchar(50) NOT NULL, - PRIMARY KEY (`id`), - UNIQUE KEY `uniq_user_creator_uic` (`user`,`creator`) -); -DROP TABLE IF EXISTS `linkis_cg_tenant_label_config`; -CREATE TABLE `linkis_cg_tenant_label_config` ( - `id` int(20) NOT NULL AUTO_INCREMENT, - `user` varchar(50) NOT NULL, - `creator` varchar(50) NOT NULL, - `tenant_value` varchar(128) NOT NULL, - `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, - `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, - `desc` varchar(100) NOT NULL, - `bussiness_user` varchar(50) NOT NULL, - PRIMARY KEY (`id`), - UNIQUE KEY `uniq_user_creator_tlc` (`user`,`creator`) +DROP TABLE IF EXISTS `linkis_ps_configuration_template_config_key`; +CREATE TABLE `linkis_ps_configuration_template_config_key` ( + `id` int(20) NOT NULL AUTO_INCREMENT, + `template_name` varchar(200) NOT NULL COMMENT '配置模板名称 冗余存储', + `template_uuid` varchar(36) NOT NULL COMMENT 'uuid 第三方侧记录的模板id', + `key_id` int(20) NOT NULL COMMENT 'id of linkis_ps_configuration_config_key', + `config_value` varchar(200) NULL DEFAULT NULL COMMENT '配置值', + `max_value` varchar(50) NULL DEFAULT NULL COMMENT '上限值', + `min_value` varchar(50) NULL DEFAULT NULL COMMENT '下限值(预留)', + `validate_range` varchar(50) NULL DEFAULT NULL COMMENT '校验正则(预留) ', + `is_valid` varchar(2) DEFAULT 'Y' COMMENT '是否有效 预留 Y/N', + `create_by` varchar(50) NOT NULL COMMENT '创建人', + `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', + `update_by` varchar(50) NULL DEFAULT NULL COMMENT '更新人', + `update_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'update time', + PRIMARY KEY (`id`), + UNIQUE INDEX `uniq_tid_kid` (`template_uuid`, `key_id`) ); -DELETE FROM linkis_cg_manager_label; - -insert into `linkis_cg_manager_label`
(`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-*,*-*', 'OPTIONAL', 2, now(), now()); -insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-IDE,*-*', 'OPTIONAL', 2, now(), now()); -insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-Visualis,*-*', 'OPTIONAL', 2, now(), now()); -insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-nodeexecution,*-*', 'OPTIONAL', 2, now(), now()); - -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue', 'yarn队列名', 'yarn队列名', 'ide', 'None', NULL, '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.instance.max', '取值范围:1-128,单位:个', '队列实例最大个数', '30', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.cores.max', '取值范围:1-500,单位:个', '队列CPU使用上限', '150', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|500)$', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.memory.max', '取值范围:1-1000,单位:G', '队列内存使用上限', '300G', 'Regex', '^([1-9]\\d{0,2}|1000)(G|g)$', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '全局各个引擎内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '全局各个引擎核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1][0-2][0-8])$', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '全局各个引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源'); - -insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (1,1); -insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (2,1); -insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (3,1); -insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) 
values (4,1); -insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (5,1); -insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (6,1); -insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (7,1); - -insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (1,'1',1); -insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (2,'1',1); -insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (3,'1',1); -insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (4,'1',1); -insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (5,'1',1); -insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (6,'1',1); -insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (7,'1',1); -insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (1, 1); -insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (2, 1); -insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (3, 1); \ No newline at end of file +DROP TABLE IF EXISTS `linkis_ps_configuration_key_limit_for_user`; +CREATE TABLE `linkis_ps_configuration_key_limit_for_user` ( + `id` int(20) NOT NULL AUTO_INCREMENT, + `user_name` varchar(50) NOT NULL COMMENT '用户名', + `combined_label_value` varchar(200) NOT NULL COMMENT '组合标签 combined_userCreator_engineType 如 hadoop-IDE,spark-2.4.3', + `key_id` int(20) NOT NULL COMMENT 'id of linkis_ps_configuration_config_key', + `config_value` varchar(200) NULL DEFAULT NULL COMMENT '配置值', + `max_value` varchar(50) NULL DEFAULT NULL COMMENT '上限值', + `min_value` varchar(50) NULL DEFAULT NULL COMMENT '下限值(预留)', + `latest_update_template_uuid` varchar(36) NOT NULL COMMENT 'uuid 第三方侧记录的模板id', + `is_valid` varchar(2) DEFAULT 'Y' COMMENT '是否有效 预留 Y/N', + `create_by` varchar(50) NOT NULL COMMENT '创建人', + `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', + `update_by` varchar(50) NULL DEFAULT NULL COMMENT '更新人', + `update_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'update time', + PRIMARY KEY (`id`), + UNIQUE INDEX `uniq_com_label_kid` (`combined_label_value`, `key_id`) +); \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-configuration/src/test/resources/data.sql b/linkis-public-enhancements/linkis-configuration/src/test/resources/data.sql new file mode 100644 index 0000000000..4137dbbf16 --- /dev/null +++ b/linkis-public-enhancements/linkis-configuration/src/test/resources/data.sql @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. +*/ + +DELETE FROM linkis_cg_manager_label; + +insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-*,*-*', 'OPTIONAL', 2, now(), now()); +insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-IDE,*-*', 'OPTIONAL', 2, now(), now()); +insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-Visualis,*-*', 'OPTIONAL', 2, now(), now()); +insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-nodeexecution,*-*', 'OPTIONAL', 2, now(), now()); + +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`,`boundary_type`) VALUES ('wds.linkis.rm.yarnqueue', 'yarn队列名', 'yarn队列名', 'ide', 'None', NULL, '0', '0', '1', '队列资源',0); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`,`boundary_type`) VALUES ('wds.linkis.rm.yarnqueue.instance.max', '取值范围:1-128,单位:个', '队列实例最大个数', '30', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源',3); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`,`boundary_type`) VALUES ('wds.linkis.rm.yarnqueue.cores.max', '取值范围:1-500,单位:个', '队列CPU使用上限', '150', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|500)$', '0', '0', '1', '队列资源',3); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`,`boundary_type`) VALUES ('wds.linkis.rm.yarnqueue.memory.max', '取值范围:1-1000,单位:G', '队列内存使用上限', '300G', 'Regex', '^([1-9]\\d{0,2}|1000)(G|g)$', '0', '0', '1', '队列资源',3); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`,`boundary_type`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '全局各个引擎内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源',3); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`,`boundary_type`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '全局各个引擎核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1][0-2][0-8])$', '0', '0', '1', '队列资源',3); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, 
`is_hidden`, `is_advanced`, `level`, `treeName`,`boundary_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '全局各个引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源',3); + +insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (1,1); +insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (2,1); +insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (3,1); +insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (4,1); +insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (5,1); +insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (6,1); +insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) values (7,1); + +insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (1,'1',1); +insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (2,'1',1); +insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (3,'1',1); +insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (4,'1',1); +insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (5,'1',1); +insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (6,'1',1); +insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) values (7,'1',1); + +insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (1, 1); +insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (2, 1); +insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES (3, 1); diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/pom.xml b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/pom.xml index 8e44eefd8b..31bf7a38e5 100644 --- a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/pom.xml +++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/pom.xml @@ -44,6 +44,11 @@ linkis-pes-rpc-client ${project.version} + + org.apache.linkis + linkis-ps-common-lock + ${project.version} + org.apache.linkis diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/conf/ContextServerConf.java b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/conf/ContextServerConf.java index 4892d6b090..3cf7d67a01 100644 --- a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/conf/ContextServerConf.java +++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/conf/ContextServerConf.java @@ -34,4 +34,7 @@ public class ContextServerConf { public static final long CS_SCHEDULER_JOB_WAIT_MILLS = CommonVars.apply("wds.linkis.cs.job.wait.mills", 10000).getValue(); + + public static final String CS_LABEL_SUFFIX = + CommonVars.apply("wds.linkis.cs.label.suffix", "").getValue(); } diff --git 
a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/label/CSInstanceLabelClient.java b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/label/CSInstanceLabelClient.java new file mode 100644 index 0000000000..7e3b671385 --- /dev/null +++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/src/main/java/org/apache/linkis/cs/server/label/CSInstanceLabelClient.java @@ -0,0 +1,104 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.cs.server.label; + +import org.apache.linkis.common.utils.Utils; +import org.apache.linkis.cs.server.conf.ContextServerConf; +import org.apache.linkis.instance.label.client.InstanceLabelClient; +import org.apache.linkis.manager.label.constant.LabelKeyConstant; +import org.apache.linkis.protocol.label.InsLabelRefreshRequest; +import org.apache.linkis.protocol.label.InsLabelRemoveRequest; +import org.apache.linkis.publicservice.common.lock.entity.CommonLock; +import org.apache.linkis.publicservice.common.lock.service.CommonLockService; +import org.apache.linkis.rpc.Sender; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.availability.AvailabilityChangeEvent; +import org.springframework.boot.availability.AvailabilityState; +import org.springframework.boot.availability.ReadinessState; +import org.springframework.context.event.ContextClosedEvent; +import org.springframework.context.event.EventListener; +import org.springframework.stereotype.Component; + +import java.util.Date; +import java.util.HashMap; +import java.util.Map; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import static org.springframework.boot.availability.ReadinessState.ACCEPTING_TRAFFIC; + +@Component +public class CSInstanceLabelClient { + private static final Logger logger = LoggerFactory.getLogger(CSInstanceLabelClient.class); + + @Autowired CommonLockService commonLockService; + + private String _LOCK = "_MASTER_PS_CS_LABEL_LOCK"; + CommonLock commonLock = new CommonLock(); + private boolean lock = false; + + @EventListener(classes = {AvailabilityChangeEvent.class}) + public void init(AvailabilityChangeEvent availabilityChangeEvent) { + AvailabilityState state = availabilityChangeEvent.getState(); + logger.info("CSInstanceLabelClient app state {}", state); + + if (state instanceof ReadinessState && state == ACCEPTING_TRAFFIC) { + Map labels = new HashMap<>(1); + commonLock.setLockObject(_LOCK); + commonLock.setCreateTime(new Date()); + commonLock.setUpdateTime(new Date()); + commonLock.setCreator(Utils.getJvmUser()); + commonLock.setLocker(Utils.getLocalHostname()); + commonLock.setUpdator(Utils.getJvmUser()); + lock = 
commonLockService.reentrantLock(commonLock, -1L); + String suffix = ContextServerConf.CS_LABEL_SUFFIX; + String confLabel; + + if (lock) { + // the master node sets the cs_1_xxx label + logger.info("The master ps-cs node got the lock {}", _LOCK + "-" + commonLock.getLocker()); + confLabel = "cs_1_" + suffix; + } else { + confLabel = "cs_2_" + suffix; + } + logger.info("Register label {} to the ps-cs node.", confLabel); + labels.put(LabelKeyConstant.ROUTE_KEY, confLabel); + InsLabelRefreshRequest insLabelRefreshRequest = new InsLabelRefreshRequest(); + insLabelRefreshRequest.setLabels(labels); + insLabelRefreshRequest.setServiceInstance(Sender.getThisServiceInstance()); + InstanceLabelClient.getInstance().refreshLabelsToInstance(insLabelRefreshRequest); + } + } + + @EventListener(classes = {ContextClosedEvent.class}) + public void shutdown(ContextClosedEvent contextClosedEvent) { + logger.info("Removing labels from the instance"); + InsLabelRemoveRequest insLabelRemoveRequest = new InsLabelRemoveRequest(); + insLabelRemoveRequest.setServiceInstance(Sender.getThisServiceInstance()); + InstanceLabelClient.getInstance().removeLabelsFromInstance(insLabelRemoveRequest); + logger.info("Succeeded in sending the clear-label RPC request"); + if (lock) { + commonLockService.unlock(commonLock); + logger.info( + "The master ps-cs node has released lock {}.", + commonLock.getLockObject() + "-" + commonLock.getLocker()); + } + } +}
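The listener above is a simple lock-based master election: at readiness every ps-cs instance races for one persistent lock, the single winner registers the cs_1_<suffix> route label, every other instance registers cs_2_<suffix>, and the winner releases the lock on shutdown. A minimal sketch of the election step, against a hypothetical LockService interface standing in for CommonLockService:

    interface LockService {
      // Non-blocking acquire; true if this instance now holds (or already held) the lock.
      boolean tryAcquire(String lockObject);
    }

    final class RouteLabelElectionSketch {
      static String electRouteLabel(LockService locks, String suffix) {
        boolean master = locks.tryAcquire("_MASTER_PS_CS_LABEL_LOCK");
        return (master ? "cs_1_" : "cs_2_") + suffix;
      }

      public static void main(String[] args) {
        LockService alwaysWins = lockObject -> true; // pretend this instance won the race
        System.out.println(electRouteLabel(alwaysWins, "test")); // cs_1_test
      }
    }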
diff --git a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/restful/InstanceRestful.java b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/restful/InstanceRestful.java index 5b333cd6f6..db379f0853 100644 --- a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/restful/InstanceRestful.java +++ b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/restful/InstanceRestful.java @@ -37,19 +37,14 @@ import org.apache.commons.logging.LogFactory; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.web.bind.annotation.RequestBody; -import org.springframework.web.bind.annotation.RequestMapping; -import org.springframework.web.bind.annotation.RequestMethod; -import org.springframework.web.bind.annotation.RestController; +import org.springframework.cloud.client.discovery.DiscoveryClient; +import org.springframework.web.bind.annotation.*; import javax.servlet.http.HttpServletRequest; -import java.util.ArrayList; -import java.util.Date; -import java.util.HashSet; -import java.util.Iterator; -import java.util.List; -import java.util.Set; +import java.util.*; +import java.util.stream.Collectors; +import java.util.stream.Stream; import com.fasterxml.jackson.databind.JsonNode; import com.github.xiaoymin.knife4j.annotations.ApiOperationSupport; @@ -74,6 +69,8 @@ public class InstanceRestful { @Autowired private DefaultInsLabelService insLabelService; + @Autowired private DiscoveryClient discoveryClient; + @ApiOperation( value = "listAllInstanceWithLabel", notes = "list all instance with label", @@ -172,4 +169,31 @@ public Message getServiceRegistryURL(HttpServletRequest request) throws Exceptio ModuleUserUtils.getOperationUser(request, "getServiceRegistryURL"); return Message.ok().data("url", serviceRegistryURL); } + + @ApiOperation(value = "getServiceInstances", response = Message.class) + @ApiImplicitParams({ + @ApiImplicitParam(name = "serviceName", required = false, dataType = "String"), + @ApiImplicitParam(name = "ip", required = false, dataType = "ip") + }) + @RequestMapping(path = "/serviceInstances", method = RequestMethod.GET) + public Message getServiceInstance( + HttpServletRequest request, + @RequestParam(value = "serviceName", required = false) String serviceName, + @RequestParam(value = "ip", required = false) String ip) { + Stream<String> serviceStream = discoveryClient.getServices().stream(); + serviceStream = serviceStream.filter(s -> s.toUpperCase().contains("LINKIS")); + if (StringUtils.isNotBlank(serviceName)) { + serviceStream = + serviceStream.filter(s -> s.toUpperCase().contains(serviceName.toUpperCase())); + } + List<ServiceInstance> instanceList = + serviceStream + .flatMap(serviceId -> discoveryClient.getInstances(serviceId).stream()) + .collect(Collectors.toList()); + if (StringUtils.isNotBlank(ip)) { + instanceList = + instanceList.stream().filter(s -> s.getHost().equals(ip)).collect(Collectors.toList()); + } + return Message.ok().data("list", instanceList); + } } diff --git a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/service/impl/DefaultInsLabelService.java b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/service/impl/DefaultInsLabelService.java index 5cc3bcc633..3b7aaf4c4b 100644 --- a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/service/impl/DefaultInsLabelService.java +++ b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/java/org/apache/linkis/instance/label/service/impl/DefaultInsLabelService.java @@ -140,12 +140,6 @@ public void refreshLabelsToInstance( List<? extends Label<?>> labels, ServiceInstance serviceInstance) throws InstanceErrorException { List<InsPersistenceLabel> insLabels = toInsPersistenceLabels(labels); - // Label candidate to be removed - List<InsPersistenceLabel> labelsCandidateRemoved = - insLabelRelationDao.searchLabelsByInstance(serviceInstance.getInstance()); - if (!labelsCandidateRemoved.isEmpty()) { - labelsCandidateRemoved.removeAll(insLabels); - } LOG.info("Drop relationships related by instance: [" + serviceInstance.getInstance() + "]"); insLabelRelationDao.dropRelationsByInstance(serviceInstance.getInstance()); // Attach labels to instance diff --git a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/mysql/InsLabelRelationMapper.xml b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/mysql/InsLabelRelationMapper.xml index d3d0cfbe42..3263252d33 100644 --- a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/mysql/InsLabelRelationMapper.xml +++ b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/mysql/InsLabelRelationMapper.xml @@ -197,7 +197,10 @@ \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/postgresql/InsLabelRelationMapper.xml b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/postgresql/InsLabelRelationMapper.xml index 9f19cdd14b..6d984aa1d6 100644 ---
a/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/postgresql/InsLabelRelationMapper.xml +++ b/linkis-public-enhancements/linkis-instance-label/linkis-instance-label-server/src/main/resources/mapper/postgresql/InsLabelRelationMapper.xml @@ -197,6 +197,9 @@ \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/cache/impl/DefaultQueryCacheManager.java b/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/cache/impl/DefaultQueryCacheManager.java index c83d730b8b..7ff5aeb32d 100644 --- a/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/cache/impl/DefaultQueryCacheManager.java +++ b/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/cache/impl/DefaultQueryCacheManager.java @@ -195,12 +195,7 @@ public void refreshUndoneTask() { Date sDate = DateUtils.addDays(eDate, -1); queryTasks = jobHistoryMapper.searchWithIdOrderAsc( - undoneTaskMinId, - null, - Arrays.asList("Running", "Inited", "Scheduled"), - sDate, - eDate, - null); + sDate, eDate, undoneTaskMinId, Arrays.asList("Running", "Inited", "Scheduled")); } finally { PageHelper.clearPage(); } diff --git a/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/dao/JobHistoryMapper.java b/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/dao/JobHistoryMapper.java index 7bb7656346..c25eee4a2e 100644 --- a/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/dao/JobHistoryMapper.java +++ b/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/dao/JobHistoryMapper.java @@ -33,12 +33,10 @@ public interface JobHistoryMapper { void updateJobHistory(JobHistory jobReq); List searchWithIdOrderAsc( - @Param("id") Long id, - @Param("umUser") String username, - @Param("status") List status, @Param("startDate") Date startDate, @Param("endDate") Date endDate, - @Param("engineType") String engineType); + @Param("startId") Long startId, + @Param("status") List status); List search( @Param("id") Long id, diff --git a/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/restful/api/QueryRestfulApi.java b/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/restful/api/QueryRestfulApi.java index f627fb6b15..a18da3a042 100644 --- a/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/restful/api/QueryRestfulApi.java +++ b/linkis-public-enhancements/linkis-jobhistory/src/main/java/org/apache/linkis/jobhistory/restful/api/QueryRestfulApi.java @@ -25,6 +25,7 @@ import org.apache.linkis.jobhistory.conversions.TaskConversions; import org.apache.linkis.jobhistory.entity.*; import org.apache.linkis.jobhistory.service.JobHistoryQueryService; +import org.apache.linkis.jobhistory.transitional.TaskStatus; import org.apache.linkis.jobhistory.util.QueryUtils; import org.apache.linkis.protocol.constants.TaskConstant; import org.apache.linkis.server.Message; @@ -101,6 +102,10 @@ public Message getTaskByID(HttpServletRequest req, @PathVariable("id") Long jobI return Message.error( "The corresponding job was not found, or there may be no permission to view the job" + "(没有找到对应的job,也可能是没有查看该job的权限)"); + } else if (taskVO.getStatus().equals(TaskStatus.Running.toString())) { + // 任务运行时不显示异常信息(Do not display 
exception information during task runtime) + taskVO.setErrCode(null); + taskVO.setErrDesc(null); } return Message.ok().data(TaskConstant.TASK, taskVO); @@ -227,14 +232,30 @@ public Message list( /** Method list should not contain subjob, which may cause performance problems. */ @ApiOperation(value = "listundonetasks", notes = "list undone tasks", response = Message.class) @ApiImplicitParams({ - @ApiImplicitParam(name = "startDate", dataType = "long"), - @ApiImplicitParam(name = "endDate", required = false, dataType = "long", value = "end date"), + @ApiImplicitParam( + name = "startDate", + required = false, + dataType = "Long", + value = "start date"), + @ApiImplicitParam(name = "endDate", required = false, dataType = "Long", value = "end date"), @ApiImplicitParam(name = "status", required = false, dataType = "String", value = "status"), @ApiImplicitParam(name = "pageNow", required = false, dataType = "Integer", value = "page now"), - @ApiImplicitParam(name = "pageSize", dataType = "Integer"), - @ApiImplicitParam(name = "creator", required = false, dataType = "String", value = "creator"), - @ApiImplicitParam(name = "engineType", dataType = "String"), - @ApiImplicitParam(name = "startTaskID", dataType = "long"), + @ApiImplicitParam( + name = "pageSize", + required = false, + dataType = "Integer", + value = "page size"), + @ApiImplicitParam( + name = "startTaskID", + required = false, + dataType = "Long", + value = "start task id"), + @ApiImplicitParam( + name = "engineType", + required = false, + dataType = "String", + value = "engine type"), + @ApiImplicitParam(name = "creator", required = false, dataType = "String", value = "creator") }) @RequestMapping(path = "/listundonetasks", method = RequestMethod.GET) public Message listundonetasks( @@ -321,13 +342,29 @@ public Message listundonetasks( @ApiOperation(value = "listundone", notes = "list undone", response = Message.class) @ApiImplicitParams({ - @ApiImplicitParam(name = "startDate", dataType = "long"), - @ApiImplicitParam(name = "endDate", required = false, dataType = "long", value = "end date"), - @ApiImplicitParam(name = "status", required = false, dataType = "String", value = "status"), + @ApiImplicitParam( + name = "startDate", + required = false, + dataType = "Long", + value = "start date"), + @ApiImplicitParam(name = "endDate", required = false, dataType = "Long", value = "end date"), @ApiImplicitParam(name = "pageNow", required = false, dataType = "Integer", value = "page now"), - @ApiImplicitParam(name = "creator", required = false, dataType = "String", value = "creator"), - @ApiImplicitParam(name = "engineType", dataType = "String"), - @ApiImplicitParam(name = "startTaskID", dataType = "long"), + @ApiImplicitParam( + name = "pageSize", + required = false, + dataType = "Integer", + value = "page size"), + @ApiImplicitParam( + name = "startTaskID", + required = false, + dataType = "Long", + value = "startTaskID"), + @ApiImplicitParam( + name = "engineType", + required = false, + dataType = "String", + value = "engineType"), + @ApiImplicitParam(name = "creator", required = false, dataType = "String", value = "creator") }) /** Method list should not contain subjob, which may cause performance problems. 
*/ @RequestMapping(path = "/listundone", method = RequestMethod.GET) @@ -350,17 +387,13 @@ public Message listundone( } if (StringUtils.isEmpty(creator)) { creator = null; - } else { - if (!QueryUtils.checkNameValid(creator)) { - return Message.error("Invalid creator : " + creator); - } + } else if (!QueryUtils.checkNameValid(creator)) { + return Message.error("Invalid creator : " + creator); } if (StringUtils.isEmpty(engineType)) { engineType = null; - } else { - if (!QueryUtils.checkNameValid(engineType)) { - return Message.error("Invalid engienType: " + engineType); - } + } else if (!QueryUtils.checkNameValid(engineType)) { + return Message.error("Invalid engineType: " + engineType); } Date sDate = new Date(startDate); Date eDate = new Date(endDate); diff --git a/linkis-public-enhancements/linkis-jobhistory/src/main/resources/mapper/mysql/JobHistoryMapper.xml b/linkis-public-enhancements/linkis-jobhistory/src/main/resources/mapper/mysql/JobHistoryMapper.xml index 7a81b6c87a..c2a533a68b 100644 --- a/linkis-public-enhancements/linkis-jobhistory/src/main/resources/mapper/mysql/JobHistoryMapper.xml +++ b/linkis-public-enhancements/linkis-jobhistory/src/main/resources/mapper/mysql/JobHistoryMapper.xml @@ -98,13 +98,11 @@ - SELECT * - FROM linkis_ps_common_lock + select * from linkis_ps_common_lock + +
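The CommonLock changes exercised by the test below make the lock reentrant per locker: acquiring again with the same (lockObject, locker) pair succeeds, while a different locker is rejected for as long as the lock is held. A minimal in-memory sketch of that semantics, a hypothetical stand-in for the DB-backed CommonLockMapper:

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    final class ReentrantDbLockSketch {
      private final Map<String, String> holders = new ConcurrentHashMap<>();

      // True if the locker newly acquired, or already holds, lockObject.
      boolean reentrantLock(String lockObject, String locker) {
        return locker.equals(holders.computeIfAbsent(lockObject, k -> locker));
      }

      // Only the current holder can release the lock.
      void unlock(String lockObject, String locker) {
        holders.remove(lockObject, locker);
      }
    }

Run against the scenario in the test: "test" acquires twice and both calls return true; "test1" then fails until "test" unlocks.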
diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/main/java/org/apache/linkis/udf/service/impl/UDFServiceImpl.java b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/main/java/org/apache/linkis/udf/service/impl/UDFServiceImpl.java index 9d6144ba07..ac3b90b128 100644 --- a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/main/java/org/apache/linkis/udf/service/impl/UDFServiceImpl.java +++ b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/main/java/org/apache/linkis/udf/service/impl/UDFServiceImpl.java @@ -113,6 +113,8 @@ public long addUDF(UDFAddVo udfVo, String userName) throws Exception { // 锁同一用户 CommonLock commonLock = new CommonLock(); commonLock.setLockObject(userName + _LOCK); + commonLock.setCreator(userName); + commonLock.setLocker(Utils.getLocalHostname()); commonLock.setCreateTime(new Date()); commonLock.setUpdateTime(new Date()); try { @@ -322,6 +324,8 @@ public void updateUDF(UDFUpdateVo udfUpdateVo, String userName) throws Exception // udfInfo.setPath(StringUtils.replace(udfInfo.getPath(), "file://", "")); CommonLock persistenceLock = new CommonLock(); persistenceLock.setLockObject(userName + _LOCK); + persistenceLock.setCreator(userName); + persistenceLock.setLocker(Utils.getLocalHostname()); persistenceLock.setCreateTime(new Date()); persistenceLock.setUpdateTime(new Date()); try { diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/main/resources/mapper/mysql/UDFDao.xml b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/main/resources/mapper/mysql/UDFDao.xml index ae27759e52..f04e5db482 100644 --- a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/main/resources/mapper/mysql/UDFDao.xml +++ b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/main/resources/mapper/mysql/UDFDao.xml @@ -31,6 +31,7 @@ + @@ -102,7 +103,7 @@ - INSERT INTO linkis_ps_udf_user_load (`udf_id`,`user_name`) VALUES (#{arg0},#{arg1}) + INSERT INTO linkis_ps_udf_user_load (`udf_id`,`user_name`,`create_time`,`update_time`) VALUES (#{arg0},#{arg1},now(),now()) SELECT - - FROM linkis_ps_udf_baseinfo - WHERE create_user in + info.id,info.`create_user`,info.`udf_name`,info.`udf_type`,info.`is_expire`,info.`is_shared`,info.`tree_id`,info.`create_time`,info.`update_time`, + info.`sys`,info.`cluster_name`,udf_version.description + FROM + linkis_ps_udf_baseinfo info , + (SELECT + udf_version.* + FROM + linkis_ps_udf_version udf_version , ( + SELECT + udf_id , MAX(bml_resource_version) AS bml_resource_version + FROM + linkis_ps_udf_version + GROUP BY + udf_id) version_tmp + WHERE + version_tmp.udf_id = udf_version.udf_id + AND version_tmp.bml_resource_version = udf_version.bml_resource_version) udf_version + WHERE + info.id = udf_version.udf_id + AND info.create_user in #{item} @@ -124,10 +142,29 @@ @@ -118,9 +118,26 @@ SELECT - - from linkis_ps_udf_baseinfo - where id in - (select udf_id from linkis_ps_udf_shared_info where user_name=#{userName}) + info.id,info."create_user",info."udf_name",info."udf_type",info."is_expire",info."is_shared",info."tree_id",info."create_time",info."update_time", + info."sys",info."cluster_name", version_tmp.description + FROM + linkis_ps_udf_baseinfo info , + ( + SELECT + version_info.* + FROM + linkis_ps_udf_version version_info , ( + SELECT + udf_id , MAX(bml_resource_version) AS bml_resource_version + FROM + linkis_ps_udf_version + GROUP BY + udf_id + ) version_max + WHERE + version_max.udf_id = version_info.udf_id + AND version_max.bml_resource_version = version_info.bml_resource_version + ) version_tmp + WHERE + info.id = version_tmp.udf_id + AND info.id in (SELECT udf_id FROM linkis_ps_udf_shared_info WHERE user_name = #{userName}) - update linkis_ps_udf_user_load set user_name=#{newUser} where udf_id=#{udfId} and user_name=#{oldUser} + update linkis_ps_udf_user_load set user_name=#{newUser}, update_time = now() where udf_id=#{udfId} and user_name=#{oldUser} - update linkis_ps_udf_version set is_published=#{isPublished} where udf_id=#{udfId} and + update linkis_ps_udf_version set is_published=#{isPublished}, update_time = now() where udf_id=#{udfId} and bml_resource_version=#{version}
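Both rewritten SELECTs join each UDF back to only its newest version row. Stripped of the surrounding mapper XML, the underlying greatest-n-per-group pattern looks like this (a standalone rendering with explicit JOIN ... ON syntax; the mapper itself uses comma joins, and the table and column names are taken from the diff):

public class LatestUdfVersionSqlSketch {
  // For each udf_id, keep only the row holding the maximum bml_resource_version:
  // an inner aggregate finds the max per group, then joins back to the detail rows.
  static final String LATEST_VERSION_PER_UDF =
      "SELECT v.* "
          + "FROM linkis_ps_udf_version v "
          + "JOIN (SELECT udf_id, MAX(bml_resource_version) AS bml_resource_version "
          + "      FROM linkis_ps_udf_version "
          + "      GROUP BY udf_id) m "
          + "  ON m.udf_id = v.udf_id "
          + " AND m.bml_resource_version = v.bml_resource_version";
}

The aggregate-then-join-back shape works on any MySQL version the mapper targets, which is presumably why it was chosen over window functions.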
diff --git a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/create.sql b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/create.sql index f865efd51b..6262d1e86e 100644 --- a/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/create.sql +++ b/linkis-public-enhancements/linkis-udf/linkis-udf-service/src/test/resources/create.sql @@ -20,11 +20,13 @@ SET REFERENTIAL_INTEGRITY FALSE; DROP TABLE IF EXISTS linkis_ps_udf_user_load CASCADE; CREATE TABLE IF NOT EXISTS linkis_ps_udf_user_load ( - id bigint(20) NOT NULL AUTO_INCREMENT, - udf_id bigint(20) NOT NULL, - user_name varchar(50) NOT NULL, - PRIMARY KEY (id) -) ; + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `udf_id` bigint(20) NOT NULL, + `user_name` varchar(50) NOT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8; DROP TABLE IF EXISTS linkis_ps_udf_baseinfo CASCADE; CREATE TABLE IF NOT EXISTS linkis_ps_udf_baseinfo ( @@ -67,6 +69,7 @@ CREATE TABLE IF NOT EXISTS linkis_ps_udf_version ( use_format varchar(255) DEFAULT NULL, description varchar(255) NOT NULL COMMENT 'version desc', create_time timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, + update_time timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, md5 varchar(100) DEFAULT NULL, PRIMARY KEY (id) ) ; @@ -82,10 +85,12 @@ CREATE TABLE IF NOT EXISTS linkis_ps_udf_shared_info ( DROP TABLE IF EXISTS linkis_ps_udf_manager CASCADE; CREATE TABLE IF NOT EXISTS linkis_ps_udf_manager ( - id bigint(20) NOT NULL AUTO_INCREMENT, - user_name varchar(20) DEFAULT NULL, - PRIMARY KEY (id) -) ; + `id` bigint(20) NOT NULL AUTO_INCREMENT, + `user_name` varchar(20) DEFAULT NULL, + `update_time` datetime DEFAULT CURRENT_TIMESTAMP, + `create_time` datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY (`id`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8; DELETE FROM linkis_ps_udf_user_load; -- ----------------------------
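The test schema gives the new create_time/update_time columns DEFAULT CURRENT_TIMESTAMP, so rows inserted by tests pick up timestamps without the mapper naming those columns. A quick standalone check of that behavior (the in-memory H2 URL and MySQL compatibility mode are assumptions for illustration, not taken from the test setup; requires the H2 driver on the classpath):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class UserLoadTimestampSketch {
  public static void main(String[] args) throws Exception {
    try (Connection c = DriverManager.getConnection("jdbc:h2:mem:udf;MODE=MySQL");
        Statement s = c.createStatement()) {
      s.execute(
          "CREATE TABLE linkis_ps_udf_user_load ("
              + "id BIGINT AUTO_INCREMENT PRIMARY KEY,"
              + "udf_id BIGINT NOT NULL,"
              + "user_name VARCHAR(50) NOT NULL,"
              + "update_time DATETIME DEFAULT CURRENT_TIMESTAMP,"
              + "create_time DATETIME DEFAULT CURRENT_TIMESTAMP)");
      // No timestamps in the INSERT: both columns fall back to their defaults.
      s.execute("INSERT INTO linkis_ps_udf_user_load (udf_id, user_name) VALUES (1, 'hadoop')");
      try (ResultSet rs =
          s.executeQuery("SELECT create_time, update_time FROM linkis_ps_udf_user_load")) {
        rs.next();
        System.out.println(rs.getTimestamp("create_time") + " / " + rs.getTimestamp("update_time"));
      }
    }
  }
}

Note that the UPDATE statements in UDFDao.xml still set update_time = now() explicitly, since a plain DEFAULT only fires on insert.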
diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/config/GatewayConfiguration.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/config/GatewayConfiguration.scala index f7558fcad6..cace679033 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/config/GatewayConfiguration.scala +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/config/GatewayConfiguration.scala @@ -108,4 +108,6 @@ object GatewayConfiguration { val IS_DOWNLOAD = CommonVars("linkis.web.result.set.export.enable", true) + val LINKIS_CLUSTER_NAME = CommonVars("linkis.cluster.name", "") + } diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/UserRestful.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/UserRestful.scala index b0f42f9ad0..40b0630706 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/UserRestful.scala +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/org/apache/linkis/gateway/security/UserRestful.scala @@ -73,7 +73,7 @@ abstract class AbstractUserRestful extends UserRestful with Logging { Utils.tryCatch { val loginUser = GatewaySSOUtils.getLoginUsername(gatewayContext) Message - .ok(loginUser + "Already logged in, please log out before signing in(已经登录,请先退出再进行登录)!") + .ok(loginUser + " already logged in, please log out before signing in(已经登录,请先退出再进行登录)!") .data("userName", loginUser) }(_ => login(gatewayContext)) case "token-login" => @@ -146,6 +146,8 @@ abstract class AbstractUserRestful extends UserRestful with Logging { Message .ok("get baseinfo success(获取成功)!") .data("resultSetExportEnable", GatewayConfiguration.IS_DOWNLOAD.getValue) + .data("linkisClusterName", GatewayConfiguration.LINKIS_CLUSTER_NAME.getValue) + } def publicKey(gatewayContext: GatewayContext): Message = { diff --git a/pom.xml b/pom.xml index db87cce8c3..eec3219c1a 100644 --- a/pom.xml +++ b/pom.xml @@ -1258,6 +1258,15 @@ pom import + + <dependency> + <groupId>org.instancio</groupId> + <artifactId>instancio-junit</artifactId> + <version>2.16.1</version> + <scope>test</scope> + </dependency> + org.assertj assertj-core
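The new instancio-junit entry (version 2.16.1, test scope) pulls in a library that generates fully populated objects for unit tests. A minimal usage sketch (Instancio.create and InstancioExtension are the library's standard entry points; using CommonLock as the target class here is illustrative, not taken from the patch):

import org.apache.linkis.publicservice.common.lock.entity.CommonLock;
import org.instancio.Instancio;
import org.instancio.junit.InstancioExtension;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

@ExtendWith(InstancioExtension.class)
class InstancioUsageSketch {
  @Test
  void createsPopulatedObject() {
    // Instancio fills every field with a random, non-null value,
    // which removes hand-written fixture setup from tests.
    CommonLock lock = Instancio.create(CommonLock.class);
    Assertions.assertNotNull(lock.getLockObject());
    Assertions.assertNotNull(lock.getLocker());
  }
}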