From 164720a04ceb3a82a1c3e37775902722bb464b10 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Cv=5Fkkhuang=E2=80=9D?= <“420895376@qq.com”> Date: Thu, 19 Sep 2024 15:42:56 +0800 Subject: [PATCH] revert build action code --- .../conf/GovernanceCommonConfTest.scala | 4 +- .../src/test/resources/application.properties | 16 +- linkis-dist/package/db/linkis_ddl.sql | 392 ++++-------------- linkis-dist/package/db/linkis_dml.sql | 146 ++----- .../upgrade/1.6.0_schema/mysql/linkis_ddl.sql | 81 +--- .../upgrade/1.6.0_schema/mysql/linkis_dml.sql | 87 ---- .../ElasticSearchEngineConnExecutor.java | 6 +- .../launch/FlinkEngineConnLaunchBuilder.scala | 13 + .../OpenLooKengEngineConnExecutor.java | 4 +- .../executor/PrestoEngineConnExecutor.java | 4 +- .../linkis/bml/dao/BmlProjectDaoTest.java | 10 +- .../linkis/bml/dao/ResourceDaoTest.java | 2 - .../apache/linkis/bml/dao/VersionDaoTest.java | 370 ++++++++--------- .../src/test/resources/application.properties | 9 +- .../src/test/resources/create.sql | 216 +++++----- .../src/test/resources/application.properties | 18 +- .../cs/server/conf/ContextServerConfTest.java | 1 + .../src/test/resources/application.properties | 18 +- .../src/test/resources/application.yml | 13 - .../src/test/resources/create.sql | 18 +- .../src/test/resources/application.properties | 4 +- .../src/test/resources/application.properties | 2 - .../src/test/resources/application.properties | 9 +- .../src/test/resources/application.properties | 18 +- .../src/test/resources/create.sql | 2 +- .../src/test/resources/application.properties | 18 +- .../src/test/resources/application.properties | 20 +- .../src/test/resources/application.properties | 7 +- tool/dependencies/known-dependencies.txt | 10 - 29 files changed, 518 insertions(+), 1000 deletions(-) diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConfTest.scala 
b/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConfTest.scala index 7988a6c95d..96b6e9a1c2 100644 --- a/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConfTest.scala +++ b/linkis-computation-governance/linkis-computation-governance-common/src/test/scala/org/apache/linkis/governance/common/conf/GovernanceCommonConfTest.scala @@ -42,8 +42,8 @@ class GovernanceCommonConfTest { val errorcodedesclen = GovernanceCommonConf.ERROR_CODE_DESC_LEN Assertions.assertEquals("wds.linkis.rm", conffilterrm) - Assertions.assertEquals("2.4.3", sparkengineversion) - Assertions.assertEquals("1.2.1", hiveengineversion) + Assertions.assertEquals("3.2.1", sparkengineversion) + Assertions.assertEquals("3.1.3", hiveengineversion) Assertions.assertEquals("python2", pythonengineversion) Assertions.assertFalse(pythoncodeparserswitch) Assertions.assertFalse(scalacodeparserswitch) diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/resources/application.properties b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/resources/application.properties index e0fa5147fc..59651dc3ae 100644 --- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/resources/application.properties +++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/resources/application.properties @@ -18,13 +18,19 @@ #h2 database config spring.datasource.driver-class-name=org.h2.Driver #init -spring.datasource.url=jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true +spring.datasource.url=jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true;INIT=runscript from 'classpath:create.sql' spring.datasource.username=sa spring.datasource.password= 
-spring.sql.init.schema-locations=classpath:create.sql -springfox.documentation.enabled=false -springfox.documentation.auto-startup=false -springfox.documentation.swagger-ui.enabled=false +spring.datasource.hikari.connection-test-query=select 1 +spring.datasource.hikari.minimum-idle=5 +spring.datasource.hikari.auto-commit=true +spring.datasource.hikari.validation-timeout=3000 +spring.datasource.hikari.pool-name=linkis-test +spring.datasource.hikari.maximum-pool-size=50 +spring.datasource.hikari.connection-timeout=30000 +spring.datasource.hikari.idle-timeout=600000 +spring.datasource.hikari.leak-detection-threshold=0 +spring.datasource.hikari.initialization-fail-timeout=1 mybatis-plus.mapper-locations=classpath*:mapper/common/*.xml,classpath*:mapper/mysql/*.xml mybatis-plus.configuration.log-impl=org.apache.ibatis.logging.stdout.StdOutImpl diff --git a/linkis-dist/package/db/linkis_ddl.sql b/linkis-dist/package/db/linkis_ddl.sql index 92835c9e09..6cb3c839e5 100644 --- a/linkis-dist/package/db/linkis_ddl.sql +++ b/linkis-dist/package/db/linkis_ddl.sql @@ -26,13 +26,6 @@ -- 组合索引建议包含所有字段名,过长的字段名可以采用缩写形式。例如idx_age_name_add -- 索引名尽量不超过50个字符,命名应该使用小写 - --- 注意事项 --- 1. TDSQL层面做了硬性规定,对于varchar索引,字段总长度不能超过768个字节,建议组合索引的列的长度根据实际列数值的长度定义,比如身份证号定义长度为varchar(20),不要定位为varchar(100), --- 同时,由于TDSQL默认采用UTF8字符集,一个字符3个字节,因此,实际索引所包含的列的长度要小于768/3=256字符长度。 --- 2. AOMP 执行sql 语句 create table 可以带反撇号,alter 语句不能带反撇号 --- 3. 使用 alter 添加、修改字段时请带要字符集和排序规则 CHARSET utf8mb4 COLLATE utf8mb4_bin - SET FOREIGN_KEY_CHECKS=0; DROP TABLE IF EXISTS `linkis_ps_configuration_config_key`; @@ -49,14 +42,14 @@ CREATE TABLE `linkis_ps_configuration_config_key`( `is_advanced` tinyint(1) DEFAULT NULL COMMENT 'Whether it is an advanced parameter. If set to 1(true), parameters would be displayed only when user choose to do so', `level` tinyint(1) DEFAULT NULL COMMENT 'Basis for displaying sorting in the front-end. 
Higher the level is, higher the rank the parameter gets', `treeName` varchar(20) DEFAULT NULL COMMENT 'Reserved field, representing the subdirectory of engineType', - `boundary_type` tinyint(2) NOT NULL DEFAULT '0' COMMENT '0 none/ 1 with mix /2 with max / 3 min and max both', + `boundary_type` int(2) NOT NULL DEFAULT '0' COMMENT '0 none/ 1 with mix /2 with max / 3 min and max both', `en_description` varchar(200) DEFAULT NULL COMMENT 'english description', `en_name` varchar(100) DEFAULT NULL COMMENT 'english name', `en_treeName` varchar(100) DEFAULT NULL COMMENT 'english treeName', `template_required` tinyint(1) DEFAULT 0 COMMENT 'template required 0 none / 1 must', - UNIQUE INDEX `uniq_key_ectype` (`key`,`engine_conn_type`), + UNIQUE KEY `uniq_key_ectype` (`key`,`engine_conn_type`), PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +)ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; DROP TABLE IF EXISTS `linkis_ps_configuration_key_engine_relation`; @@ -66,7 +59,7 @@ CREATE TABLE `linkis_ps_configuration_key_engine_relation`( `engine_type_label_id` bigint(20) NOT NULL COMMENT 'engine label id', PRIMARY KEY (`id`), UNIQUE INDEX `uniq_kid_lid` (`config_key_id`, `engine_type_label_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; DROP TABLE IF EXISTS `linkis_ps_configuration_config_value`; @@ -79,7 +72,7 @@ CREATE TABLE `linkis_ps_configuration_config_value`( `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), UNIQUE INDEX `uniq_kid_lid` (`config_key_id`, `config_label_id`) -)ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +)ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; DROP TABLE IF EXISTS `linkis_ps_configuration_category`; CREATE TABLE `linkis_ps_configuration_category` ( @@ -92,7 +85,7 @@ CREATE TABLE `linkis_ps_configuration_category` ( `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY 
(`id`), UNIQUE INDEX `uniq_label_id` (`label_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; DROP TABLE IF EXISTS `linkis_ps_configuration_template_config_key`; CREATE TABLE IF NOT EXISTS `linkis_ps_configuration_template_config_key` ( @@ -178,7 +171,7 @@ CREATE TABLE `linkis_ps_job_history_group_history` ( PRIMARY KEY (`id`), KEY `idx_created_time` (`created_time`), KEY `idx_submit_user` (`submit_user`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; DROP TABLE IF EXISTS `linkis_ps_job_history_detail`; @@ -194,19 +187,19 @@ CREATE TABLE `linkis_ps_job_history_detail` ( `status` varchar(32) DEFAULT NULL COMMENT 'status', `priority` int(4) DEFAULT 0 COMMENT 'order of subjob', PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; DROP TABLE IF EXISTS `linkis_ps_common_lock`; CREATE TABLE `linkis_ps_common_lock` ( `id` int(11) NOT NULL AUTO_INCREMENT, `lock_object` varchar(255) COLLATE utf8_bin DEFAULT NULL, - `locker` VARCHAR(255) CHARSET utf8mb4 COLLATE utf8mb4_bin DEFAULT NULL COMMENT 'locker', + `locker` varchar(255) COLLATE utf8_bin NOT NULL, `time_out` longtext COLLATE utf8_bin, `update_time` datetime DEFAULT CURRENT_TIMESTAMP, `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), UNIQUE KEY `uniq_lock_object` (`lock_object`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; @@ -222,7 +215,7 @@ CREATE TABLE `linkis_ps_udf_manager` ( `update_time` datetime DEFAULT CURRENT_TIMESTAMP, `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- @@ -237,7 +230,7 @@ CREATE TABLE `linkis_ps_udf_shared_group` ( `update_time` datetime DEFAULT 
CURRENT_TIMESTAMP, `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8; DROP TABLE IF EXISTS `linkis_ps_udf_shared_info`; CREATE TABLE `linkis_ps_udf_shared_info` @@ -247,7 +240,7 @@ CREATE TABLE `linkis_ps_udf_shared_info` `user_name` varchar(50) NOT NULL, `update_time` datetime DEFAULT CURRENT_TIMESTAMP, `create_time` datetime DEFAULT CURRENT_TIMESTAMP -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Table structure for linkis_ps_udf_tree @@ -256,7 +249,7 @@ DROP TABLE IF EXISTS `linkis_ps_udf_tree`; CREATE TABLE `linkis_ps_udf_tree` ( `id` bigint(20) NOT NULL AUTO_INCREMENT, `parent` bigint(20) NOT NULL, - `name` varchar(50) DEFAULT NULL COMMENT 'Category name of the function. It would be displayed in the front-end', + `name` varchar(100) DEFAULT NULL COMMENT 'Category name of the function. 
It would be displayed in the front-end', `user_name` varchar(50) NOT NULL, `description` varchar(255) DEFAULT NULL, `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, @@ -264,7 +257,7 @@ CREATE TABLE `linkis_ps_udf_tree` ( `category` varchar(50) DEFAULT NULL COMMENT 'Used to distinguish between udf and function', PRIMARY KEY (`id`), UNIQUE KEY `uniq_parent_name_uname_category` (`parent`,`name`,`user_name`,`category`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- @@ -280,7 +273,7 @@ CREATE TABLE `linkis_ps_udf_user_load` ( `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), UNIQUE KEY `uniq_uid_uname` (`udf_id`, `user_name`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8; DROP TABLE IF EXISTS `linkis_ps_udf_baseinfo`; CREATE TABLE `linkis_ps_udf_baseinfo` ( @@ -296,7 +289,7 @@ CREATE TABLE `linkis_ps_udf_baseinfo` ( `is_expire` bit(1) DEFAULT NULL, `is_shared` bit(1) DEFAULT NULL, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- bdp_easy_ide.linkis_ps_udf_version definition DROP TABLE IF EXISTS `linkis_ps_udf_version`; @@ -314,7 +307,7 @@ CREATE TABLE `linkis_ps_udf_version` ( `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP, `md5` varchar(100) DEFAULT NULL, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8; SET FOREIGN_KEY_CHECKS=0; @@ -332,7 +325,7 @@ CREATE TABLE `linkis_ps_variable_key_user` ( UNIQUE KEY `uniq_aid_kid_uname` (`application_id`,`key_id`,`user_name`), KEY `idx_key_id` (`key_id`), KEY `idx_aid` (`application_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; -- ---------------------------- @@ -350,7 +343,7 @@ CREATE TABLE `linkis_ps_variable_key` ( 
`value_regex` varchar(100) DEFAULT NULL COMMENT 'Reserved word', PRIMARY KEY (`id`), KEY `idx_aid` (`application_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; -- ---------------------------- -- Table structure for linkis_ps_datasource_access @@ -364,7 +357,7 @@ CREATE TABLE `linkis_ps_datasource_access` ( `application_id` int(4) NOT NULL, `access_time` datetime NOT NULL, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; -- ---------------------------- -- Table structure for linkis_ps_datasource_field @@ -384,7 +377,7 @@ CREATE TABLE `linkis_ps_datasource_field` ( `length` int(11) DEFAULT NULL, `mode_info` varchar(128) COLLATE utf8_bin DEFAULT NULL, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; -- ---------------------------- -- Table structure for linkis_ps_datasource_import @@ -396,7 +389,7 @@ CREATE TABLE `linkis_ps_datasource_import` ( `import_type` int(4) NOT NULL, `args` varchar(255) COLLATE utf8_bin NOT NULL, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; -- ---------------------------- -- Table structure for linkis_ps_datasource_lineage @@ -408,7 +401,7 @@ CREATE TABLE `linkis_ps_datasource_lineage` ( `source_table` varchar(64) COLLATE utf8_bin DEFAULT NULL, `update_time` datetime DEFAULT NULL, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; -- ---------------------------- -- Table structure for linkis_ps_datasource_table @@ -434,7 +427,7 @@ CREATE TABLE `linkis_ps_datasource_table` ( `is_available` tinyint(1) NOT NULL, PRIMARY KEY (`id`), UNIQUE KEY `uniq_db_name` (`database`,`name`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; 
+) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; -- ---------------------------- -- Table structure for linkis_ps_datasource_table_info @@ -451,7 +444,7 @@ CREATE TABLE `linkis_ps_datasource_table_info` ( `update_time` datetime NOT NULL, `field_num` int(11) NOT NULL, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; @@ -475,7 +468,7 @@ CREATE TABLE `linkis_ps_cs_context_map` ( PRIMARY KEY (`id`), UNIQUE KEY `uniq_key_cid_ctype` (`key`,`context_id`,`context_type`), KEY `idx_keywords` (`keywords`(191)) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; -- ---------------------------- -- Table structure for linkis_ps_cs_context_map_listener @@ -489,7 +482,7 @@ CREATE TABLE `linkis_ps_cs_context_map_listener` ( `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', `access_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'last access time', PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; -- ---------------------------- -- Table structure for linkis_ps_cs_context_history @@ -507,7 +500,7 @@ CREATE TABLE `linkis_ps_cs_context_history` ( `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', `access_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'last access time', KEY `idx_keyword` (`keyword`(191)) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; -- ---------------------------- -- Table structure for linkis_ps_cs_context_id @@ -520,8 +513,8 @@ CREATE TABLE `linkis_ps_cs_context_id` ( `source` varchar(255) DEFAULT NULL, `expire_type` varchar(32) DEFAULT NULL, `expire_time` datetime DEFAULT NULL, - `instance` varchar(64) DEFAULT NULL, - `backup_instance` varchar(64) DEFAULT NULL, + `instance` varchar(128) DEFAULT NULL, + 
`backup_instance` varchar(255) DEFAULT NULL, `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp', `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', `access_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'last access time', @@ -529,7 +522,7 @@ CREATE TABLE `linkis_ps_cs_context_id` ( KEY `idx_instance` (`instance`(128)), KEY `idx_backup_instance` (`backup_instance`(191)), KEY `idx_instance_bin` (`instance`(128),`backup_instance`(128)) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; -- ---------------------------- -- Table structure for linkis_ps_cs_context_listener @@ -543,7 +536,7 @@ CREATE TABLE `linkis_ps_cs_context_listener` ( `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', `access_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'last access time', PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; DROP TABLE IF EXISTS `linkis_ps_bml_resources`; @@ -565,7 +558,7 @@ CREATE TABLE if not exists `linkis_ps_bml_resources` ( `enable_flag` tinyint(1) NOT NULL DEFAULT '1' COMMENT 'Status, 1: normal, 0: frozen', unique key `uniq_rid_eflag`(`resource_id`, `enable_flag`), PRIMARY KEY (`id`) -) ENGINE=InnoDB AUTO_INCREMENT=9 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB AUTO_INCREMENT=9 DEFAULT CHARSET=utf8mb4; DROP TABLE IF EXISTS `linkis_ps_bml_resources_version`; @@ -586,7 +579,7 @@ CREATE TABLE if not exists `linkis_ps_bml_resources_version` ( `enable_flag` tinyint(1) NOT NULL DEFAULT '1' COMMENT 'Status, 1: normal, 0: frozen', unique key `uniq_rid_version`(`resource_id`, `version`), PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; @@ -600,7 +593,7 @@ CREATE TABLE if not exists `linkis_ps_bml_resources_permission` ( `update_time` 
datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'updated time', `updator` varchar(50) NOT NULL COMMENT 'updator', PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; @@ -615,7 +608,7 @@ CREATE TABLE if not exists `linkis_ps_resources_download_history` ( `version` varchar(20) not null, `downloader` varchar(50) NOT NULL COMMENT 'Downloader', PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; @@ -639,7 +632,7 @@ CREATE TABLE if not exists `linkis_ps_bml_resources_task` ( `last_update_time` datetime NOT NULL COMMENT 'Last update time', unique key `uniq_rid_version` (resource_id, version), PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4; @@ -655,7 +648,7 @@ create table if not exists linkis_ps_bml_project( `create_time` datetime DEFAULT now(), unique key `uniq_name` (`name`), PRIMARY KEY (`id`) -)ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin ROW_FORMAT=COMPACT; +)ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin ROW_FORMAT=COMPACT; @@ -670,7 +663,7 @@ create table if not exists linkis_ps_bml_project_user( `expire_time` datetime default null, unique key `uniq_name_pid`(`username`, `project_id`), PRIMARY KEY (`id`) -)ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin ROW_FORMAT=COMPACT; +)ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin ROW_FORMAT=COMPACT; DROP TABLE IF EXISTS `linkis_ps_bml_project_resource`; @@ -679,34 +672,34 @@ create table if not exists linkis_ps_bml_project_resource( `project_id` int(10) NOT NULL, `resource_id` varchar(128) DEFAULT NULL, PRIMARY KEY (`id`) -)ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin ROW_FORMAT=COMPACT; +)ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin ROW_FORMAT=COMPACT; DROP TABLE IF EXISTS `linkis_ps_instance_label`; CREATE TABLE `linkis_ps_instance_label` ( `id` int(20) 
NOT NULL AUTO_INCREMENT, `label_key` varchar(32) COLLATE utf8_bin NOT NULL COMMENT 'string key', - `label_value` varchar(128) COLLATE utf8_bin NOT NULL COMMENT 'string value', + `label_value` varchar(255) COLLATE utf8_bin NOT NULL COMMENT 'string value', `label_feature` varchar(16) COLLATE utf8_bin NOT NULL COMMENT 'store the feature of label, but it may be redundant', `label_value_size` int(20) NOT NULL COMMENT 'size of key -> value map', `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp', `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp', PRIMARY KEY (`id`), UNIQUE KEY `uniq_lk_lv` (`label_key`,`label_value`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; DROP TABLE IF EXISTS `linkis_ps_instance_label_value_relation`; CREATE TABLE `linkis_ps_instance_label_value_relation` ( `id` int(20) NOT NULL AUTO_INCREMENT, - `label_value_key` varchar(128) COLLATE utf8_bin NOT NULL COMMENT 'value key', + `label_value_key` varchar(255) COLLATE utf8_bin NOT NULL COMMENT 'value key', `label_value_content` varchar(255) COLLATE utf8_bin DEFAULT NULL COMMENT 'value content', `label_id` int(20) DEFAULT NULL COMMENT 'id reference linkis_ps_instance_label -> id', `update_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp', `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'create unix timestamp', PRIMARY KEY (`id`), UNIQUE KEY `uniq_lvk_lid` (`label_value_key`,`label_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; DROP TABLE IF EXISTS `linkis_ps_instance_label_relation`; CREATE TABLE `linkis_ps_instance_label_relation` ( @@ -717,7 +710,7 @@ CREATE TABLE `linkis_ps_instance_label_relation` ( `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'create unix timestamp', PRIMARY KEY (`id`), UNIQUE KEY `uniq_lid_instance` 
(`label_id`,`service_instance`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; DROP TABLE IF EXISTS `linkis_ps_instance_info`; @@ -729,7 +722,7 @@ CREATE TABLE `linkis_ps_instance_info` ( `create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'create unix timestamp', PRIMARY KEY (`id`), UNIQUE KEY `uniq_instance` (`instance`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; DROP TABLE IF EXISTS `linkis_ps_error_code`; CREATE TABLE `linkis_ps_error_code` ( @@ -739,8 +732,8 @@ CREATE TABLE `linkis_ps_error_code` ( `error_regex` varchar(1024) DEFAULT NULL, `error_type` int(3) DEFAULT 0, PRIMARY KEY (`id`), - UNIQUE INDEX `idx_error_regex` (error_regex(191)) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; + UNIQUE INDEX `idx_error_regex` (error_regex(255)) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; DROP TABLE IF EXISTS `linkis_cg_manager_service_instance`; CREATE TABLE `linkis_cg_manager_service_instance` ( @@ -755,10 +748,9 @@ CREATE TABLE `linkis_cg_manager_service_instance` ( `create_time` datetime DEFAULT CURRENT_TIMESTAMP, `updator` varchar(32) COLLATE utf8_bin DEFAULT NULL, `creator` varchar(32) COLLATE utf8_bin DEFAULT NULL, - `params` text COLLATE utf8_bin DEFAULT NULL, PRIMARY KEY (`id`), UNIQUE KEY `uniq_instance` (`instance`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; DROP TABLE IF EXISTS `linkis_cg_manager_linkis_resources`; CREATE TABLE `linkis_cg_manager_linkis_resources` ( @@ -776,7 +768,7 @@ CREATE TABLE `linkis_cg_manager_linkis_resources` ( `updator` varchar(255) COLLATE utf8_bin DEFAULT NULL, `creator` varchar(255) COLLATE utf8_bin DEFAULT NULL, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; DROP TABLE IF EXISTS 
`linkis_cg_manager_lock`; CREATE TABLE `linkis_cg_manager_lock` ( @@ -786,7 +778,7 @@ CREATE TABLE `linkis_cg_manager_lock` ( `update_time` datetime DEFAULT CURRENT_TIMESTAMP, `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; DROP TABLE IF EXISTS `linkis_cg_rm_external_resource_provider`; CREATE TABLE `linkis_cg_rm_external_resource_provider` ( @@ -796,7 +788,7 @@ CREATE TABLE `linkis_cg_rm_external_resource_provider` ( `labels` varchar(32) DEFAULT NULL, `config` text NOT NULL, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8; DROP TABLE IF EXISTS `linkis_cg_manager_engine_em`; CREATE TABLE `linkis_cg_manager_engine_em` ( @@ -806,33 +798,32 @@ CREATE TABLE `linkis_cg_manager_engine_em` ( `update_time` datetime DEFAULT CURRENT_TIMESTAMP, `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; DROP TABLE IF EXISTS `linkis_cg_manager_label`; CREATE TABLE `linkis_cg_manager_label` ( `id` int(20) NOT NULL AUTO_INCREMENT, - `label_key` varchar(32) COLLATE utf8_bin NOT NULL, - `label_value` varchar(128) COLLATE utf8_bin NOT NULL, + `label_key` varchar(50) COLLATE utf8_bin NOT NULL, + `label_value` varchar(255) COLLATE utf8_bin NOT NULL, `label_feature` varchar(16) COLLATE utf8_bin NOT NULL, `label_value_size` int(20) NOT NULL, `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), UNIQUE KEY `uniq_lk_lv` (`label_key`,`label_value`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; DROP TABLE IF EXISTS `linkis_cg_manager_label_value_relation`; CREATE TABLE 
`linkis_cg_manager_label_value_relation` ( `id` int(20) NOT NULL AUTO_INCREMENT, - `label_value_key` varchar(128) COLLATE utf8_bin NOT NULL, + `label_value_key` varchar(255) COLLATE utf8_bin NOT NULL, `label_value_content` varchar(255) COLLATE utf8_bin DEFAULT NULL, `label_id` int(20) DEFAULT NULL, `update_time` datetime DEFAULT CURRENT_TIMESTAMP, `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), - UNIQUE KEY `uniq_lvk_lid` (`label_value_key`,`label_id`), - UNIQUE KEY `unlid_lvk_lvc` (`label_id`,`label_value_key`,`label_value_content`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; + UNIQUE KEY `uniq_lvk_lid` (`label_value_key`,`label_id`) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; DROP TABLE IF EXISTS `linkis_cg_manager_label_resource`; CREATE TABLE `linkis_cg_manager_label_resource` ( @@ -843,16 +834,16 @@ CREATE TABLE `linkis_cg_manager_label_resource` ( `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), UNIQUE KEY `uniq_label_id` (`label_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; DROP TABLE IF EXISTS `linkis_cg_ec_resource_info_record`; CREATE TABLE `linkis_cg_ec_resource_info_record` ( `id` INT(20) NOT NULL AUTO_INCREMENT, - `label_value` VARCHAR(128) NOT NULL COMMENT 'ec labels stringValue', + `label_value` VARCHAR(255) NOT NULL COMMENT 'ec labels stringValue', `create_user` VARCHAR(128) NOT NULL COMMENT 'ec create user', `service_instance` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT 'ec instance info', `ecm_instance` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT 'ecm instance info ', - `ticket_id` VARCHAR(36) NOT NULL COMMENT 'ec ticket id', + `ticket_id` VARCHAR(100) NOT NULL COMMENT 'ec ticket id', `status` varchar(50) DEFAULT NULL COMMENT 'EC status: Starting,Unlock,Locked,Idle,Busy,Running,ShuttingDown,Failed,Success', `log_dir_suffix` varchar(128) COLLATE utf8_bin DEFAULT NULL COMMENT 'log path', 
`request_times` INT(8) COMMENT 'resource request times', @@ -868,8 +859,8 @@ CREATE TABLE `linkis_cg_ec_resource_info_record` ( PRIMARY KEY (`id`), KEY `idx_ticket_id` (`ticket_id`), UNIQUE KEY `uniq_tid_lv` (`ticket_id`,`label_value`), - UNIQUE KEY `uniq_sinstance_status_cuser_ctime` (`service_instance`, `status`, `create_user`, `create_time`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; + UNIQUE KEY uniq_sinstance_status_cuser_ctime (service_instance, status, create_user, create_time) +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; DROP TABLE IF EXISTS `linkis_cg_manager_label_service_instance`; CREATE TABLE `linkis_cg_manager_label_service_instance` ( @@ -880,7 +871,7 @@ CREATE TABLE `linkis_cg_manager_label_service_instance` ( `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), KEY `idx_lid_instance` (`label_id`,`service_instance`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; DROP TABLE IF EXISTS `linkis_cg_manager_label_user`; @@ -891,7 +882,7 @@ CREATE TABLE `linkis_cg_manager_label_user` ( `update_time` datetime DEFAULT CURRENT_TIMESTAMP, `create_time` datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; DROP TABLE IF EXISTS `linkis_cg_manager_metrics_history`; @@ -907,7 +898,7 @@ CREATE TABLE `linkis_cg_manager_metrics_history` ( `serviceName` varchar(255) COLLATE utf8_bin DEFAULT NULL, `instance` varchar(255) COLLATE utf8_bin DEFAULT NULL, PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; DROP TABLE IF EXISTS `linkis_cg_manager_service_instance_metrics`; CREATE TABLE `linkis_cg_manager_service_instance_metrics` ( @@ -918,9 +909,9 @@ CREATE TABLE `linkis_cg_manager_service_instance_metrics` ( `healthy_status` varchar(255) COLLATE utf8_bin 
DEFAULT NULL, `update_time` datetime DEFAULT CURRENT_TIMESTAMP, `create_time` datetime DEFAULT CURRENT_TIMESTAMP, - `description` varchar(256) COLLATE utf8_bin NOT NULL DEFAULT '', + description varchar(256) CHARSET utf8mb4 COLLATE utf8mb4_bin DEFAULT '', PRIMARY KEY (`instance`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; DROP TABLE IF EXISTS `linkis_cg_engine_conn_plugin_bml_resources`; CREATE TABLE `linkis_cg_engine_conn_plugin_bml_resources` ( @@ -935,7 +926,7 @@ CREATE TABLE `linkis_cg_engine_conn_plugin_bml_resources` ( `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'created time', `last_update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'updated time', PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; -- ---------------------------- -- Table structure for linkis_ps_dm_datasource @@ -949,7 +940,7 @@ CREATE TABLE `linkis_ps_dm_datasource` `datasource_type_id` int(11) NOT NULL, `create_identify` varchar(255) COLLATE utf8_bin DEFAULT NULL, `create_system` varchar(255) COLLATE utf8_bin DEFAULT NULL, - `parameter` varchar(2048) COLLATE utf8_bin NULL DEFAULT NULL, + `parameter` varchar(1024) COLLATE utf8_bin NULL DEFAULT NULL, `create_time` datetime NULL DEFAULT CURRENT_TIMESTAMP, `modify_time` datetime NULL DEFAULT CURRENT_TIMESTAMP, `create_user` varchar(255) COLLATE utf8_bin DEFAULT NULL, @@ -960,7 +951,7 @@ CREATE TABLE `linkis_ps_dm_datasource` `published_version_id` int(11) DEFAULT NULL, PRIMARY KEY (`id`), UNIQUE INDEX `uniq_datasource_name` (`datasource_name`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; -- ---------------------------- -- Table structure for linkis_ps_dm_datasource_env @@ -972,7 +963,7 @@ CREATE TABLE `linkis_ps_dm_datasource_env` `env_name` varchar(32) COLLATE utf8_bin NOT 
NULL, `env_desc` varchar(255) COLLATE utf8_bin DEFAULT NULL, `datasource_type_id` int(11) NOT NULL, - `parameter` varchar(2048) COLLATE utf8_bin DEFAULT NULL, + `parameter` varchar(1024) COLLATE utf8_bin DEFAULT NULL, `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, `create_user` varchar(255) COLLATE utf8_bin NULL DEFAULT NULL, `modify_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, @@ -980,7 +971,7 @@ CREATE TABLE `linkis_ps_dm_datasource_env` PRIMARY KEY (`id`), UNIQUE KEY `uniq_env_name` (`env_name`), UNIQUE INDEX `uniq_name_dtid` (`env_name`, `datasource_type_id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; -- ---------------------------- @@ -1001,7 +992,7 @@ CREATE TABLE `linkis_ps_dm_datasource_type` `classifier_en` varchar(32) DEFAULT NULL COMMENT 'english classifier', PRIMARY KEY (`id`), UNIQUE INDEX `uniq_name` (`name`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; -- ---------------------------- -- Table structure for linkis_ps_dm_datasource_type_key @@ -1013,7 +1004,7 @@ CREATE TABLE `linkis_ps_dm_datasource_type_key` `data_source_type_id` int(11) NOT NULL, `key` varchar(32) COLLATE utf8_bin NOT NULL, `name` varchar(32) COLLATE utf8_bin NOT NULL, - `name_en` varchar(32) COLLATE utf8_bin NULL DEFAULT NULL, + `name_en` varchar(32) COLLATE utf8_bin NOT NULL, `default_value` varchar(50) COLLATE utf8_bin NULL DEFAULT NULL, `value_type` varchar(50) COLLATE utf8_bin NOT NULL, `scope` varchar(50) COLLATE utf8_bin NULL DEFAULT NULL, @@ -1028,7 +1019,7 @@ CREATE TABLE `linkis_ps_dm_datasource_type_key` `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (`id`), UNIQUE KEY `uniq_dstid_key` (`data_source_type_id`, `key`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; -- ---------------------------- -- Table structure for 
linkis_ps_dm_datasource_version -- ---------------------------- @@ -1042,7 +1033,7 @@ CREATE TABLE `linkis_ps_dm_datasource_version` `create_time` datetime(0) NULL DEFAULT CURRENT_TIMESTAMP, `create_user` varchar(255) COLLATE utf8_bin NULL DEFAULT NULL, PRIMARY KEY `uniq_vid_did` (`version_id`, `datasource_id`) USING BTREE -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin; -- ---------------------------- -- Table structure for linkis_mg_gateway_auth_token @@ -1060,7 +1051,7 @@ CREATE TABLE `linkis_mg_gateway_auth_token` ( `update_by` varchar(32), PRIMARY KEY (`id`), UNIQUE KEY `uniq_token_name` (`token_name`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB DEFAULT CHARSET=utf8; @@ -1077,10 +1068,10 @@ CREATE TABLE `linkis_cg_tenant_label_config` ( `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, `desc` varchar(100) COLLATE utf8_bin NOT NULL, `bussiness_user` varchar(50) COLLATE utf8_bin NOT NULL, - `is_valid` varchar(1) COLLATE utf8_bin NOT NULL DEFAULT 'Y' COMMENT 'is valid', + `is_valid` varchar(1) CHARSET utf8mb4 COLLATE utf8mb4_bin DEFAULT 'Y' COMMENT 'is valid', PRIMARY KEY (`id`), UNIQUE KEY `uniq_user_creator` (`user`,`creator`) -) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; +) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8 COLLATE=utf8_bin; -- ---------------------------- -- Table structure for linkis_cg_user_ip_config @@ -1097,9 +1088,7 @@ CREATE TABLE `linkis_cg_user_ip_config` ( `bussiness_user` varchar(50) COLLATE utf8_bin NOT NULL, PRIMARY KEY (`id`), UNIQUE KEY `uniq_user_creator` (`user`,`creator`) -) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; - - +) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8 COLLATE=utf8_bin; -- ---------------------------- -- Table structure for linkis_org_user @@ -1119,218 +1108,3 @@ CREATE TABLE `linkis_org_user` ( `user_itsm_no` varchar(64) 
COMMENT 'user itsm no', PRIMARY KEY (`user_name`) ) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4 COLLATE=utf8mb4_bin COMMENT ='user org info'; - - - - - - --- 商业化 未开源的放在最后面 上面的sql 和开源保持一致 --- ---------------------------- --- Table structure for linkis_cg_synckey --- ---------------------------- -DROP TABLE IF EXISTS `linkis_cg_synckey`; -CREATE TABLE `linkis_cg_synckey` ( - `username` char(32) NOT NULL, - `synckey` char(32) NOT NULL, - `instance` varchar(32) NOT NULL, - `create_time` datetime(3) NOT NULL, - PRIMARY KEY (`username`, `synckey`) -)ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; - - - --- ---------------------------- --- Table structure for linkis_et_validator_checkinfo --- ---------------------------- -DROP TABLE IF EXISTS `linkis_et_validator_checkinfo`; -CREATE TABLE `linkis_et_validator_checkinfo` ( - `id` bigint(20) NOT NULL AUTO_INCREMENT, - `execute_user` varchar(64) COLLATE utf8_bin NOT NULL, - `db_name` varchar(64) COLLATE utf8_bin DEFAULT NULL, - `params` text COLLATE utf8_bin, - `code_type` varchar(32) COLLATE utf8_bin NOT NULL, - `operation_type` varchar(32) COLLATE utf8_bin NOT NULL, - `status` tinyint(4) DEFAULT NULL, - `code` text COLLATE utf8_bin, - `msg` text COLLATE utf8_bin, - `risk_level` varchar(32) COLLATE utf8_bin DEFAULT NULL, - `hit_rules` text COLLATE utf8_bin, - `create_time` DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; - --- ---------------------------- --- Table structure for linkis_ps_bml_cleaned_resources_version --- ---------------------------- -DROP TABLE IF EXISTS `linkis_ps_bml_cleaned_resources_version`; -CREATE TABLE `linkis_ps_bml_cleaned_resources_version` ( - `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键', - `resource_id` varchar(50) NOT NULL COMMENT '资源id,资源的uuid', - `file_md5` varchar(32) NOT NULL COMMENT '文件的md5摘要', - `version` varchar(20) NOT NULL COMMENT '资源版本(v 加上 五位数字)', - `size` int(10) 
NOT NULL COMMENT '文件大小', - `start_byte` bigint(20) unsigned NOT NULL DEFAULT '0', - `end_byte` bigint(20) unsigned NOT NULL DEFAULT '0', - `resource` varchar(2000) NOT NULL COMMENT '资源内容(文件信息 包括 路径和文件名)', - `description` varchar(2000) DEFAULT NULL COMMENT '描述', - `start_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '开始时间', - `end_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '结束时间', - `client_ip` varchar(200) NOT NULL COMMENT '客户端ip', - `updator` varchar(50) DEFAULT NULL COMMENT '修改者', - `enable_flag` tinyint(1) NOT NULL DEFAULT '1' COMMENT '状态,1:正常,0:冻结', - `old_resource` varchar(2000) NOT NULL COMMENT '旧的路径', - PRIMARY KEY (`id`), - UNIQUE KEY `resource_id_version` (`resource_id`,`version`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; - - --- ---------------------------- --- Table structure for linkis_ps_configuration_across_cluster_rule --- ---------------------------- -DROP TABLE IF EXISTS `linkis_ps_configuration_across_cluster_rule`; -CREATE TABLE `linkis_ps_configuration_across_cluster_rule` ( - id INT AUTO_INCREMENT COMMENT '规则ID,自增主键', - cluster_name char(32) NOT NULL COMMENT '集群名称,不能为空', - creator char(32) NOT NULL COMMENT '创建者,不能为空', - username char(32) NOT NULL COMMENT '用户,不能为空', - create_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间,不能为空', - create_by char(32) NOT NULL COMMENT '创建者,不能为空', - update_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '修改时间,不能为空', - update_by char(32) NOT NULL COMMENT '更新者,不能为空', - rules varchar(512) NOT NULL COMMENT '规则内容,不能为空', - is_valid VARCHAR(2) DEFAULT 'N' COMMENT '是否有效 Y/N', - PRIMARY KEY (id), - UNIQUE KEY idx_creator_username (creator, username) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; - --- ---------------------------- --- Table structure for linkis_ps_configuration_template_config_key --- ---------------------------- -DROP TABLE IF EXISTS `linkis_ps_configuration_template_config_key`; -CREATE TABLE 
`linkis_ps_configuration_template_config_key` ( - `id` BIGINT(20) NOT NULL AUTO_INCREMENT, - `template_name` VARCHAR(200) NOT NULL COMMENT '配置模板名称 冗余存储', - `template_uuid` VARCHAR(36) NOT NULL COMMENT 'uuid 第三方侧记录的模板id', - `key_id` BIGINT(20) NOT NULL COMMENT 'id of linkis_ps_configuration_config_key', - `config_value` VARCHAR(200) NULL DEFAULT NULL COMMENT '配置值', - `max_value` VARCHAR(50) NULL DEFAULT NULL COMMENT '上限值', - `min_value` VARCHAR(50) NULL DEFAULT NULL COMMENT '下限值(预留)', - `validate_range` VARCHAR(50) NULL DEFAULT NULL COMMENT '校验正则(预留) ', - `is_valid` VARCHAR(2) DEFAULT 'Y' COMMENT '是否有效 预留 Y/N', - `create_by` VARCHAR(50) NOT NULL COMMENT '创建人', - `create_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', - `update_by` VARCHAR(50) NULL DEFAULT NULL COMMENT '更新人', - `update_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'update time', - PRIMARY KEY (`id`), - UNIQUE INDEX `uniq_tid_kid` (`template_uuid`, `key_id`), - UNIQUE INDEX `uniq_tname_kid` (`template_uuid`, `key_id`) -)ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; - --- ---------------------------- --- Table structure for linkis_ps_configuration_key_limit_for_user --- ---------------------------- -DROP TABLE IF EXISTS `linkis_ps_configuration_key_limit_for_user`; -CREATE TABLE `linkis_ps_configuration_key_limit_for_user` ( - `id` BIGINT(20) NOT NULL AUTO_INCREMENT, - `user_name` VARCHAR(50) NOT NULL COMMENT '用户名', - `combined_label_value` VARCHAR(128) NOT NULL COMMENT '组合标签 combined_userCreator_engineType 如 hadoop-IDE,spark-2.4.3', - `key_id` BIGINT(20) NOT NULL COMMENT 'id of linkis_ps_configuration_config_key', - `config_value` VARCHAR(200) NULL DEFAULT NULL COMMENT '配置值', - `max_value` VARCHAR(50) NULL DEFAULT NULL COMMENT '上限值', - `min_value` VARCHAR(50) NULL DEFAULT NULL COMMENT '下限值(预留)', - `latest_update_template_uuid` VARCHAR(36) NOT NULL COMMENT 'uuid 第三方侧记录的模板id', - `is_valid` VARCHAR(2) DEFAULT 'Y' COMMENT '是否有效 预留 Y/N', - `create_by` VARCHAR(50) NOT NULL 
COMMENT '创建人', - `create_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'create time', - `update_by` VARCHAR(50) NULL DEFAULT NULL COMMENT '更新人', - `update_time` DATETIME DEFAULT CURRENT_TIMESTAMP COMMENT 'update time', - PRIMARY KEY (`id`), - UNIQUE INDEX `uniq_com_label_kid` (`combined_label_value`, `key_id`) -)ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; - - - --- ---------------------------- --- Table structure for linkis_org_user_sync --- ---------------------------- -DROP TABLE IF EXISTS `linkis_org_user_sync`; -CREATE TABLE `linkis_org_user_sync` ( - `cluster_code` varchar(16) COMMENT '集群', - `user_type` varchar(64) COMMENT '用户类型', - `user_name` varchar(128) COMMENT '授权用户', - `org_id` varchar(16) COMMENT '部门ID', - `org_name` varchar(64) COMMENT '部门名字', - `queue_name` varchar(64) COMMENT '默认资源队列', - `db_name` varchar(64) COMMENT '默认操作数据库', - `interface_user` varchar(64) COMMENT '接口人', - `is_union_analyse` varchar(64) COMMENT '是否联合分析人', - `create_time` varchar(64) COMMENT '用户创建时间', - `user_itsm_no` varchar(64) COMMENT '用户创建单号', - PRIMARY KEY (`user_name`) -) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4 COLLATE=utf8mb4_bin COMMENT ='用户部门统计INC表'; - --- ---------------------------- --- Table structure for linkis_cg_tenant_department_config --- ---------------------------- -DROP TABLE IF EXISTS `linkis_cg_tenant_department_config`; -CREATE TABLE `linkis_cg_tenant_department_config` ( - `id` int(20) NOT NULL AUTO_INCREMENT COMMENT 'ID', - `creator` varchar(50) COLLATE utf8_bin NOT NULL COMMENT '应用', - `department` varchar(64) COLLATE utf8_bin NOT NULL COMMENT '部门名称', - `department_id` varchar(16) COLLATE utf8_bin NOT NULL COMMENT '部门ID', - `tenant_value` varchar(128) COLLATE utf8_bin NOT NULL COMMENT '部门租户标签', - `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', - `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间', - `create_by` varchar(50) COLLATE utf8_bin NOT NULL COMMENT '创建用户', - `is_valid` varchar(1) 
COLLATE utf8_bin NOT NULL DEFAULT 'Y' COMMENT '是否有效', - PRIMARY KEY (`id`), - UNIQUE KEY `uniq_creator_department` (`creator`,`department`) -) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; - --- ---------------------------- --- Table structure for linkis_mg_gateway_whitelist_config --- ---------------------------- -DROP TABLE IF EXISTS `linkis_mg_gateway_whitelist_config`; -CREATE TABLE `linkis_mg_gateway_whitelist_config` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `allowed_user` varchar(128) COLLATE utf8_bin NOT NULL, - `client_address` varchar(128) COLLATE utf8_bin NOT NULL, - `create_time` datetime DEFAULT NULL, - `update_time` datetime DEFAULT NULL, - PRIMARY KEY (`id`), - UNIQUE KEY `address_uniq` (`allowed_user`, `client_address`), - KEY `linkis_mg_gateway_whitelist_config_allowed_user` (`allowed_user`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; - --- ---------------------------- --- Table structure for linkis_mg_gateway_whitelist_sensitive_user --- ---------------------------- -DROP TABLE IF EXISTS `linkis_mg_gateway_whitelist_sensitive_user`; -CREATE TABLE `linkis_mg_gateway_whitelist_sensitive_user` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `sensitive_username` varchar(128) COLLATE utf8_bin NOT NULL, - `create_time` datetime DEFAULT NULL, - `update_time` datetime DEFAULT NULL, - PRIMARY KEY (`id`), - UNIQUE KEY `sensitive_username` (`sensitive_username`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; - --- ---------------------------- --- Table structure for linkis_ps_python_module_info --- ---------------------------- -DROP TABLE IF EXISTS `linkis_ps_python_module_info`; -CREATE TABLE `linkis_ps_python_module_info` ( - `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '自增id', - `name` varchar(255) NOT NULL COMMENT 'python模块名称', - `description` text COMMENT 'python模块描述', - `path` varchar(255) NOT NULL COMMENT 'hdfs路径', - `engine_type` varchar(50) NOT NULL COMMENT '引擎类型,python/spark/all', - 
`create_user` varchar(50) NOT NULL COMMENT '创建用户', - `update_user` varchar(50) NOT NULL COMMENT '修改用户', - `is_load` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否加载,0-未加载,1-已加载', - `is_expire` tinyint(1) DEFAULT NULL COMMENT '是否过期,0-未过期,1-已过期)', - `create_time` datetime NOT NULL COMMENT '创建时间', - `update_time` datetime NOT NULL COMMENT '修改时间', - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin COMMENT='Python模块包信息表'; \ No newline at end of file diff --git a/linkis-dist/package/db/linkis_dml.sql b/linkis-dist/package/db/linkis_dml.sql index dea1719942..d786b58444 100644 --- a/linkis-dist/package/db/linkis_dml.sql +++ b/linkis-dist/package/db/linkis_dml.sql @@ -28,7 +28,6 @@ SET @TRINO_LABEL="trino-371"; SET @IO_FILE_LABEL="io_file-1.0"; SET @OPENLOOKENG_LABEL="openlookeng-1.5.0"; SET @ELASTICSEARCH_LABEL="elasticsearch-7.6.2"; -SET @NEBULA_LABEL="nebula-3.0.0"; -- 衍生变量: SET @SPARK_ALL=CONCAT('*-*,',@SPARK_LABEL); @@ -68,38 +67,33 @@ SET @IO_FILE_IDE=CONCAT('*-IDE,',@IO_FILE_LABEL); SET @ELASTICSEARCH_ALL=CONCAT('*-*,',@ELASTICSEARCH_LABEL); SET @ELASTICSEARCH_IDE=CONCAT('*-IDE,',@ELASTICSEARCH_LABEL); -SET @NEBULA_ALL=CONCAT('*-*,',@NEBULA_LABEL); -SET @NEBULA_IDE=CONCAT('*-IDE,',@NEBULA_LABEL); - -- Global Settings INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue', 'yarn队列名', 'yarn队列名', 'default', 'None', NULL, '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.instance.max', '取值范围:1-128,单位:个', '队列实例最大个数', '30', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, 
`validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.cores.max', '取值范围:1-4000,单位:个', '队列CPU使用上限', '150', 'Regex', '^(?:[1-9]\\d{0,2}|[1-3]\\d{3}|4000)$', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.memory.max', '取值范围:1-10000,单位:G', '队列内存使用上限', '300G', 'Regex', '^(?:[1-9]\\d{0,3}|10000)(G|g)$', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '全局各个引擎内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '全局各个引擎核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1][0-2][0-8])$', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '全局各个引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`, `boundary_type`, `en_description`, `en_name`, `en_treeName`, `template_required`) VALUES ('linkis.entrance.creator.job.concurrency.limit', 'Creator级别限制,范围:1-10000,单位:个', 'Creator最大并发数', '10000', 'NumInterval', '[1,10000]', '', 0, 1, 1, '队列资源', 3, 'creator 
maximum task limit', 'creator maximum task limit', 'QueueResources', '1'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.instance.max', '取值范围:1-128,单位:个', '队列实例最大个数', '128', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.cores.max', '取值范围:1-500,单位:个', '队列CPU使用上限', '500', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|500)$', '0', '0', '1', '队列资源'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.memory.max', '取值范围:1-1000,单位:G', '队列内存使用上限', '1000G', 'Regex', '^([1-9]\\d{0,2}|1000)(G|g)$', '0', '0', '1', '队列资源'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '全局各个引擎内存使用上限', '100G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '全局各个引擎核心个数上限', '128', 'Regex', '^(?:[1-9]\\d?|[1][0-2][0-8])$', '0', '0', '1', '队列资源'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '全局各个引擎最大并发数', '20', 
'NumInterval', '[1,20]', '0', '0', '1', '队列资源'); -- spark -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`, `template_required`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'spark引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'spark', '1'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`, `template_required`) VALUES ('spark.executor.instances', '取值范围:1-40,单位:个', 'spark执行器实例最大并发数', '1', 'NumInterval', '[1,40]', '0', '0', '2', 'spark资源设置', 'spark', '1'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'spark引擎最大并发数', '20', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'spark'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.instances', '取值范围:1-40,单位:个', 'spark执行器实例最大并发数', '1', 'NumInterval', '[1,40]', '0', '0', '2', 'spark资源设置', 'spark'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.cores', '取值范围:1-8,单位:个', 'spark执行器核心个数', '1', 'NumInterval', '[1,8]', '0', '0', '1','spark资源设置', 'spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`, 
`template_required`) VALUES ('spark.executor.memory', '取值范围:1-28,单位:G', 'spark执行器内存大小', '1g', 'Regex', '^([1-9]|1[0-9]|2[0-8])(G|g)$', '0', '0', '3', 'spark资源设置', 'spark', '1'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.memory', '取值范围:1-15,单位:G', 'spark执行器内存大小', '1g', 'Regex', '^([1-9]|1[0-5])(G|g)$', '0', '0', '3', 'spark资源设置', 'spark'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.cores', '取值范围:只能取1,单位:个', 'spark驱动器核心个数', '1', 'NumInterval', '[1,1]', '0', '1', '1', 'spark资源设置','spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`, `template_required`) VALUES ('spark.driver.memory', '取值范围:1-15,单位:G', 'spark驱动器内存大小','1g', 'Regex', '^([1-9]|1[0-5])(G|g)$', '0', '0', '1', 'spark资源设置', 'spark', '1'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.max.free.time', '取值范围:3m,15m,30m,1h,2h,6h,12h', '引擎空闲退出时间','1h', 'OFT', '[\"1h\",\"2h\",\"6h\",\"12h\",\"30m\",\"15m\",\"3m\"]', '0', '0', '1', 'spark引擎设置', 'spark'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.memory', '取值范围:1-15,单位:G', 'spark驱动器内存大小','1g', 'Regex', '^([1-9]|1[0-5])(G|g)$', '0', '0', '1', 'spark资源设置', 'spark'); +INSERT INTO 
`linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.max.free.time', '取值范围:3m,15m,30m,1h,2h', '引擎空闲退出时间','1h', 'OFT', '[\"1h\",\"2h\",\"30m\",\"15m\",\"3m\"]', '0', '0', '1', 'spark引擎设置', 'spark'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.python.version', '取值范围:python2,python3', 'python版本','python2', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'spark引擎设置', 'spark'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`, `boundary_type`, `en_treeName`, `en_description`, `en_name`) VALUES ('spark.conf', '多个参数使用分号[;]分隔 例如spark.shuffle.compress=true;', 'spark自定义配置参数',null, 'None', NULL, 'spark',0, 1, 1,'spark资源设置', 0, 'Spark Resource Settings','Multiple parameters are separated by semicolons [;] For example, spark.shuffle.compress=true;', 'Spark Custom Configuration Parameters'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`, `boundary_type`, `en_treeName`, `en_description`, `en_name`) VALUES ('spark.conf', '多个参数使用分号[;]分隔 例如spark.shuffle.spill=true;', 'spark自定义配置参数',null, 'None', NULL, 'spark',0, 1, 1,'spark资源设置', 0, 'Spark Resource Settings','Multiple parameters are separated by semicolons [;] For example, spark.sql.shuffle.partitions=10;', 'Spark Custom Configuration Parameters'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, 
`engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`, `boundary_type`, `en_treeName`, `en_description`, `en_name`) VALUES ('spark.locality.wait', '范围:0-3,单位:秒', '任务调度本地等待时间', '3s', 'OFT', '[\"0s\",\"1s\",\"2s\",\"3s\"]', 'spark', 0, 1, 1, 'spark资源设置', 0, 'Spark Resource Settings', 'Range: 0-3, Unit: second', 'Task Scheduling Local Waiting Time'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`, `boundary_type`, `en_treeName`, `en_description`, `en_name`) VALUES ('spark.memory.fraction', '范围:0.4,0.5,0.6,单位:百分比', '执行内存和存储内存的百分比', '0.6', 'OFT', '[\"0.4\",\"0.5\",\"0.6\"]', 'spark', 0, 1, 1, 'spark资源设置', 0, 'Spark Resource Settings', 'Range: 0.4, 0.5, 0.6, in percentage', 'Percentage Of Execution Memory And Storage Memory'); -- hive -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`, `template_required`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'hive引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'hive', '1'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`, `template_required`) VALUES ('wds.linkis.engineconn.java.driver.memory', '取值范围:1-10,单位:G', 'hive引擎初始化内存大小','1g', 'Regex', '^([1-9]|10)(G|g)$', '0', '0', '1', 'hive引擎设置', 'hive', '1'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'hive引擎最大并发数', '20', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'hive'); +INSERT INTO 
`linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.java.driver.memory', '取值范围:1-10,单位:G', 'hive引擎初始化内存大小','1g', 'Regex', '^([1-9]|10)(G|g)$', '0', '0', '1', 'hive引擎设置', 'hive'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('hive.client.java.opts', 'hive客户端进程参数', 'hive引擎启动时jvm参数','', 'None', NULL, '1', '1', '1', 'hive引擎设置', 'hive'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('mapred.reduce.tasks', '范围:-1-10000,单位:个', 'reduce数', '-1', 'NumInterval', '[-1,10000]', '0', '1', '1', 'hive资源设置', 'hive'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.max.free.time', '取值范围:3m,15m,30m,1h,2h', '引擎空闲退出时间','1h', 'OFT', '[\"1h\",\"2h\",\"30m\",\"15m\",\"3m\"]', '0', '0', '1', 'hive引擎设置', 'hive'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`,`en_description`, `en_name`, `en_treeName`, `template_required`) VALUES ("mapreduce.job.running.reduce.limit", '范围:10-999,单位:个', 'hive引擎reduce限制', '999', 'NumInterval', '[10,999]', '0', '1', '1', 'MapReduce设置', 'hive','Value Range: 10-999, Unit: Piece', 'Number Limit Of MapReduce Job Running Reduce', 'MapReduce Settings', '1'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, 
`default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`,`en_description`, `en_name`, `en_treeName`, `template_required`) VALUES ('mapreduce.job.reduce.slowstart.completedmaps', '取值范围:0-1', 'Map任务数与总Map任务数之间的比例','0.05', 'Regex', '^(0(\\.\\d{1,2})?|1(\\.0{1,2})?)$', '0', '0', '1', 'hive引擎设置', 'hive', 'Value Range: 0-1', 'The Ratio Between The Number Of Map Tasks And The Total Number Of Map Tasks', 'Hive Engine Settings', '1'); -- python INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', 'python驱动器内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源', 'python'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', 'python驱动器核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源', 'python'); @@ -110,7 +104,7 @@ INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, -- pipeline INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.mold', '取值范围:csv或excel', '结果集导出类型','csv', 'OFT', '[\"csv\",\"excel\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline'); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.field.split', '取值范围:,或\\t或;或|', 'csv分隔符',',', 'OFT', '[\",\",\"\\\\t\",\"\\\\;\",\"\\\\|\"]', 
'0', '0', '1', 'pipeline引擎设置', 'pipeline'); +INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.field.split', '取值范围:,或\\t', 'csv分隔符',',', 'OFT', '[\",\",\"\\\\t\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.charset', '取值范围:utf-8或gbk', '结果集导出字符集','gbk', 'OFT', '[\"utf-8\",\"gbk\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.isoverwrite', '取值范围:true或false', '是否覆写','true', 'OFT', '[\"true\",\"false\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-3,单位:个', 'pipeline引擎最大并发数','3', 'NumInterval', '[1,3]', '0', '0', '1', 'pipeline引擎设置', 'pipeline'); @@ -174,20 +168,8 @@ INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.trino.ssl.truststore.type', 'Trino服务器SSL truststore类型', 'truststore类型', 'null', 'None', '', 'trino', 0, 0, 1, '数据源配置'); INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, 
`engine_conn_type`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('linkis.trino.ssl.truststore.password', 'Trino服务器SSL truststore密码', 'truststore密码', 'null', 'None', '', 'trino', 0, 0, 1, '数据源配置'); --- nebula -INSERT INTO `linkis_ps_configuration_config_key` (`key`,description,name,default_value,validate_type,validate_range,engine_conn_type,is_hidden,is_advanced,`level`,treeName,boundary_type,en_treeName,en_description,en_name,template_required) VALUES -('linkis.nebula.host','Nebula 连接地址','Nebula 连接地址',NULL,'None',NULL,'nebula',0,0,1,'Necula引擎设置',0,'Nebula Engine Settings','Nebula Host','Nebula Host',0); -INSERT INTO `linkis_ps_configuration_config_key` (`key`,description,name,default_value,validate_type,validate_range,engine_conn_type,is_hidden,is_advanced,`level`,treeName,boundary_type,en_treeName,en_description,en_name,template_required) VALUES -('linkis.nebula.port','Nebula 连接端口','Nebula 连接端口',NULL,'None',NULL,'nebula',0,0,1,'Necula引擎设置',0,'Nebula Engine Settings','Nebula Port','Nebula Port',0); -INSERT INTO `linkis_ps_configuration_config_key` (`key`,description,name,default_value,validate_type,validate_range,engine_conn_type,is_hidden,is_advanced,`level`,treeName,boundary_type,en_treeName,en_description,en_name,template_required) VALUES -('linkis.nebula.username','Nebula 连接用户名','Nebula 连接用户名',NULL,'None',NULL,'nebula',0,0,1,'Necula引擎设置',0,'Nebula Engine Settings','Nebula Username','Nebula Username',0); -INSERT INTO `linkis_ps_configuration_config_key` (`key`,description,name,default_value,validate_type,validate_range,engine_conn_type,is_hidden,is_advanced,`level`,treeName,boundary_type,en_treeName,en_description,en_name,template_required) VALUES -('linkis.nebula.password','Nebula 连接密码','Nebula 连接密码',NULL,'None',NULL,'nebula',0,0,1,'Necula引擎设置',0,'Nebula Engine Settings','Nebula Password','Nebula Password',0); -INSERT INTO `linkis_ps_configuration_config_key` 
(`key`,description,name,default_value,validate_type,validate_range,engine_conn_type,is_hidden,is_advanced,`level`,treeName,boundary_type,en_treeName,en_description,en_name,template_required) VALUES -('linkis.nebula.space', 'Nebula 图空间', 'Nebula 图空间', NULL, 'None', NULL, 'nebula', 0, 0, 1, 'Necula引擎设置', 0, 'Nebula Engine Settings', 'Nebula Space', 'Nebula Space', 0); - -- Configuration first level directory -insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-全局设置,*-*', 'OPTIONAL', 2, now(), now()); +insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-GlobalSettings,*-*', 'OPTIONAL', 2, now(), now()); insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-IDE,*-*', 'OPTIONAL', 2, now(), now()); insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-Visualis,*-*', 'OPTIONAL', 2, now(), now()); insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-nodeexecution,*-*', 'OPTIONAL', 2, now(), now()); @@ -204,13 +186,12 @@ insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_featur insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType', @ELASTICSEARCH_ALL, 'OPTIONAL', 2, now(), now()); insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) 
VALUES ('combined_userCreator_engineType', @PRESTO_ALL, 'OPTIONAL', 2, now(), now()); insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType', @TRINO_ALL, 'OPTIONAL', 2, now(), now()); -insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType', @NEBULA_IDE,'OPTIONAL',2,now(),now()); -insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType', @NEBULA_ALL,'OPTIONAL',2,now(),now()); + -- Custom correlation engine (e.g. spark) and configKey value -- Global Settings insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) (select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config -INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = '' and label.label_value = "*-*,*-*"); +INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type is null and label.label_value = "*-*,*-*"); -- spark(Here choose to associate all spark type Key values with spark) insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) @@ -263,11 +244,6 @@ insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `eng (select config.id as config_key_id, label.id AS engine_type_label_id FROM linkis_ps_configuration_config_key config INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'trino' and label_value = @TRINO_ALL); --- nebula-3.0.0 -insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) -(select config.id as config_key_id, label.id AS engine_type_label_id FROM linkis_ps_configuration_config_key config 
-INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'nebula' and label_value = @NEBULA_ALL); - -- If you need to customize the parameters of the new engine, the following configuration does not need to write SQL initialization -- Just write the SQL above, and then add applications and engines to the management console to automatically initialize the configuration @@ -386,27 +362,21 @@ insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_val INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = @TRINO_ALL); --- nebula default configuration -insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) -(select relation.config_key_id AS config_key_id, '' AS config_value, relation.engine_type_label_id AS config_label_id FROM `linkis_ps_configuration_key_engine_relation` relation -INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = @NEBULA_ALL); - insert into `linkis_cg_rm_external_resource_provider`(`id`,`resource_type`,`name`,`labels`,`config`) values (1,'Yarn','default',NULL,'{"rmWebAddress":"@YARN_RESTFUL_URL","hadoopVersion":"@HADOOP_VERSION","authorEnable":@YARN_AUTH_ENABLE,"user":"@YARN_AUTH_USER","pwd":"@YARN_AUTH_PWD","kerberosEnable":@YARN_KERBEROS_ENABLE,"principalName":"@YARN_PRINCIPAL_NAME","keytabPath":"@YARN_KEYTAB_PATH","krb5Path":"@YARN_KRB5_PATH"}'); -- errorcode -- 01 linkis server INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01001','您的任务没有路由到后台ECM,请联系管理员','The em of labels',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01002','任务运行内存超过设置内存限制,导致Linkis服务负载过高,请在管理台调整Driver内存或联系管理员扩容','Unexpected end of file from server',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES 
('01003','任务运行内存超过设置内存限制,导致Linkis服务负载过高,请在管理台调整Driver内存或联系管理员扩容','failed to ask linkis Manager Can be retried SocketTimeoutException',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01002','Linkis服务负载过高,请联系管理员扩容','Unexpected end of file from server',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01003','Linkis服务负载过高,请联系管理员扩容','failed to ask linkis Manager Can be retried SocketTimeoutException',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01004','引擎在启动时被Kill,请联系管理员',' [0-9]+ Killed',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01005','请求Yarn获取队列信息重试2次仍失败,请联系管理员','Failed to request external resourceClassCastException',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01006','没有健康可用的ecm节点,可能任务量大,导致节点资源处于不健康状态,尝试kill空闲引擎释放资源','There are corresponding ECM tenant labels',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01007','文件编码格式异常,请联系管理人员处理','UnicodeEncodeError.*characters',0); + -- 11 linkis resource 12 user resource 13 user task resouce INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01101','ECM资源不足,请联系管理员扩容','ECM resources are insufficient',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01102','ECM 内存资源不足,可以设置更低的驱动内存','ECM memory resources are insufficient',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01102','ECM 内存资源不足,请联系管理员扩容','ECM memory resources are insufficient',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01103','ECM CPU资源不足,请联系管理员扩容','ECM CPU resources are insufficient',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01104','ECM 实例资源不足,请联系管理员扩容','ECM 
Insufficient number of instances',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01105','机器内存不足,请联系管理员扩容','Cannot allocate memory',0); @@ -428,24 +398,21 @@ INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13001','Java进程内存溢出,建议优化脚本内容','OutOfMemoryError',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13002','任务运行内存超过设置内存限制,请在管理台增加executor内存或在提交任务时通过spark.executor.memory或spark.executor.memoryOverhead调整内存','Container killed by YARN for exceeding memory limits',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13003','任务运行内存超过设置内存限制,请在管理台增加executor内存或调优sql后执行','read record exception',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13004','任务运行内存超过设置内存限制,导致引擎意外退出,请在管理台增加executor内存或在提交任务时通过spark.executor.memory或spark.executor.memoryOverhead调整内存','failed because the engine quitted unexpectedly',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13005','任务运行内存超过设置内存限制,导致Spark app应用退出,请在管理台增加driver内存或在提交任务时通过spark.driver.memory调整内存','Spark application has already stopped',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13006','任务运行内存超过设置内存限制,导致Spark context应用退出,请在管理台增加driver内存或在提交任务时通过spark.driver.memory调整内存','Spark application sc has already stopped',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13007','任务运行内存超过设置内存限制,导致Pyspark子进程退出,请在管理台增加executor内存或在提交任务时通过spark.executor.memory或spark.executor.memoryOverhead调整内存','Pyspark process has stopped',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13002','使用资源过大,请调优sql或者加大资源','Container killed by YARN for exceeding memory limits',0); +INSERT INTO 
linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13003','使用资源过大,请调优sql或者加大资源','read record exception',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13004','引擎意外退出,可能是使用资源过大导致','failed because the engine quitted unexpectedly',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13005','Spark app应用退出,可能是复杂任务导致','Spark application has already stopped',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13006','Spark context退出,可能是复杂任务导致','Spark application sc has already stopped',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13007','Pyspark子进程意外退出,可能是复杂任务导致','Pyspark process has stopped',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13008','任务产生的序列化结果总大小超过了配置的spark.driver.maxResultSize限制。请检查您的任务,看看是否有可能减小任务产生的结果大小,或则可以考虑压缩或合并结果,以减少传输的数据量','is bigger than spark.driver.maxResultSize',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13009','您的任务因为引擎退出(退出可能是引擎进程OOM或者主动kill引擎)导致失败','ERROR EC exits unexpectedly and actively kills the task',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13010','任务运行内存超过设置内存限制,请在管理台增加executor内存或在提交任务时通过spark.executor.memory或spark.executor.memoryOverhead调整内存','Container exited with a non-zero exit code',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13011','广播表过大导致driver内存溢出,请在执行sql前增加参数后重试:set spark.sql.autoBroadcastJoinThreshold=-1;','dataFrame to local exception',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13012','driver内存不足,请增加driver内存后重试','Failed to allocate a page (\\S+.*\\)), try again.',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES 
('13013','使用spark默认变量sc导致后续代码执行失败','sc.setJobGroup(\\S+.*\\))',0); + -- 21 cluster Authority 22 db Authority INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21001','会话创建失败,用户%s不能提交应用到队列:%s,请联系提供队列给您的人员','User (\\S+) cannot submit applications to queue ([A-Za-z._0-9]+)',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21002','创建Python解释器失败,请联系管理员','initialize python executor failed',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('21003','创建单机Python解释器失败,请联系管理员','PythonSession process cannot be initialized',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22001','%s无权限访问,请申请开通数据表权限,请联系您的数据管理人员','Permission denied:\\s*user=[a-zA-Z0-9_]+[,,]\\s*access=[a-zA-Z0-9_]+\\s*[,,]\\s*inode="([a-zA-Z0-9/_\\.]+)"',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22001','%s无权限访问,请申请开通数据表权限,请联系您的数据管理人员','Permission denied:\\s*user=[a-zA-Z0-9_]+,\\s*access=[A-Z]+\\s*,\\s*inode="([a-zA-Z0-9/_\\.]+)"',0); -- INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22002','您可能没有相关权限','Permission denied',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22003','所查库表无权限','Authorization failed:No privilege',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22004','用户%s在机器不存在,请确认是否申请了相关权限','user (\\S+) does not exist',0); @@ -453,15 +420,12 @@ INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22006','用户在机器不存在,请确认是否申请了相关权限','at com.sun.security.auth.UnixPrincipal',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22007','用户在机器不存在,请确认是否申请了相关权限','LoginException: java.lang.NullPointerException: invalid 
null input: name',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22008','用户在机器不存在,请确认是否申请了相关权限','User not known to the underlying authentication module',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22009','用户组不存在','FileNotFoundException: /tmp/?',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('22010','用户组不存在','error looking up the name of group',0); -- 30 Space exceeded 31 user operation INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('30001','库超过限制','is exceeded',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('31001','用户主动kill任务','is killed by user',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('31002','您提交的EngineTypeLabel没有对应的引擎版本','EngineConnPluginNotFoundException',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('30003','用户Token下发失败,请确认用户初始化是否成功。可联系BDP Hive运维处理','Auth failed for User',0); -- 41 not exist 44 sql 43 python 44 shell 45 scala 46 importExport INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('41001','数据库%s不存在,请检查引用的数据库是否有误','Database ''([a-zA-Z_0-9]+)'' not found',0); @@ -491,14 +455,14 @@ INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42006','表%s在数据库%s中已经存在,请删除相应表后重试','Table or view ''(\\S+)'' already exists in database ''(\\S+)''',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42006','表%s在数据库中已经存在,请删除相应表后重试','Table (\\S+) already exists',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42006','表%s在数据库中已经存在,请删除相应表后重试','Table already exists',0); -INSERT INTO linkis_ps_error_code 
(error_code,error_desc,error_regex,error_type) VALUES ('42006','表%s在数据库中已经存在,请删除相应表后重试','AnalysisException: (\\S+) already exists',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42006','表%s在数据库中已经存在,请删除相应表后重试','AnalysisException: (S+) already exists',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42007','插入目标表字段数量不匹配,请检查代码!','requires that the data to be inserted have the same number of columns as the target table',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42008','数据类型不匹配,请检查代码!','due to data type mismatch: differing types in',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42009','字段%s引用有误,请检查字段是否存在!','Invalid column reference (\\S+)',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42010','字段%s提取数据失败','Can''t extract value from (\\S+): need',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42009','字段%s引用有误,请检查字段是否存在!','Invalid column reference (S+)',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42010','字段%s提取数据失败','Can''t extract value from (S+): need',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42011','括号或者关键字不匹配,请检查代码!','mismatched input ''(\\S+)'' expecting',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42012','group by 位置2不在select列表中,请检查代码!','GROUP BY position (\\S+) is not in select list',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42013','字段提取数据失败请检查字段类型','Can''t extract value from (\\S+): need struct type but got string',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42012','group by 位置2不在select列表中,请检查代码!','GROUP BY position (S+) is not in 
select list',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42013','字段提取数据失败请检查字段类型','Can''t extract value from (S+): need struct type but got string',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42014','插入数据未指定目标表字段%s,请检查代码!','Cannot insert into target table because column number/types are different ''(S+)''',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42015','表别名%s错误,请检查代码!','Invalid table alias ''(\\S+)''',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('42016','UDF函数未指定参数,请检查代码!','UDFArgumentException Argument expected',0); @@ -530,46 +494,30 @@ INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43039','语法问题,请检查脚本','Distinct window functions are not supported',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43040','查询一定要指定数据源和库信息','Schema must be specified when session schema is not set',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43041','用户UDF函数 %s 加载失败,请检查后再执行','Invalid function (\\S+)',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43042','插入数据表动态分区数超过配置值 %s ,请优化sql或调整配置hive.exec.max.dynamic.partitions后重试','Maximum was set to (\\S+) partitions per node',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43043','执行任务消耗内存超过限制,hive任务请修改map或reduce的内存,spark任务请修改executor端内存','Error:java heap space',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43044','表 %s 分区数超过阈值 %s,需要分批删除分区,再删除表','the partitions of table (\\S+) exceeds threshold (\\S+)',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES 
('43045','查询/操作的表 %s 分区数为 %s ,超过阈值 %s ,需要限制查询/操作的分区数量','Number of partitions scanned \\(=(\\d+)\\) on table (\\S+) exceeds limit \\(=(\\d+)\\)',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43046','动态分区一次性写入分区数 %s ,超过阈值 %s,请减少一次性写入的分区数','Number of dynamic partitions created is (\\S+), which is more than (\\S+)',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43047','动态分区一次性写入分区数 %s ,超过阈值 %s,请减少一次性写入的分区数','Maximum was set to (\\S+) partitions per node, number of dynamic partitions on this node: (\\S+)',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43048','参数引用错误,请检查参数 %s 是否正常引用','UnboundLocalError.*local variable (\\S+) referenced before assignment',0); -- 43 python INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43001','代码中存在NoneType空类型变量,请检查代码','''NoneType'' object',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43002','数组越界','IndexError:List index out of range',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43003','您的代码有语法错误,请您修改代码之后执行','SyntaxError',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43004','python代码变量%s未定义','name ''(\\S+)'' is not defined',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43005','python udf %s 未定义','Undefined function:s+''(\\S+)''',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43006','python执行不能将%s和%s两种类型进行连接','cannot concatenate ''(\\S+)'' and ''(\\S+)''',0); --- INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43007','pyspark执行失败,可能是语法错误或stage失败','Py4JJavaError: An error occurred',0); --- INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES 
('43008','python代码缩进对齐有误','unexpected indent',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43004','python代码变量%s未定义','name ''(S+)'' is not defined',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43005','python udf %s 未定义','Undefined function:s+''(S+)''',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43006','python执行不能将%s和%s两种类型进行连接','cannot concatenate ''(S+)'' and ''(S+)''',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43007','pyspark执行失败,可能是语法错误或stage失败','Py4JJavaError: An error occurred',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43009','python代码缩进有误','unexpected indent',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43010','python代码反斜杠后面必须换行','unexpected character after line',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43011','导出Excel表超过最大限制1048575','Invalid row number',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43012','python save as table未指定格式,默认用parquet保存,hive查询报错','parquet.io.ParquetDecodingException',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43013','索引使用错误','IndexError',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43014','sql语法有问题','raise ParseException',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43015','python代码变量%s未定义','ImportError: ''(\\S+)''',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43015','当前节点需要的CS表解析失败,请检查当前CSID对应的CS表是否存在','Cannot parse cs table for node',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43016','模块 %s 没有属性 %s 
,请确认代码引用是否正常','AttributeError: \'(\\S+)\' object has no attribute \'(\\S+)\'',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43017','存在参数无效或拼写错误,请确认 %s 参数正确性','KeyError: (.*)',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43017','存在参数无效或拼写错误,请确认 %s 参数正确性','KeyError: (\\(.+\\))',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43018','文件未找到,请确认该路径( %s )是否存在','FileNotFoundError.*No such file or directory\\:\\s\'(\\S+)\'',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43019','执行表在元数据库中存在meta缓存,meta信息与缓存不一致导致,请增加参数(--conf spark.sql.hive.convertMetastoreOrc=false)后重试','Unable to alter table.*Table is not allowed to be altered',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43020','Python 进程已停止,查询失败!','python process has stopped',0); - -- 46 importExport INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('46001','找不到导入文件地址:%s','java.io.FileNotFoundException: (\\S+) \\(No such file or directory\\)',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('46002','导出为excel时临时文件目录权限异常','java.io.IOException: Permission denied(.+)at org.apache.poi.xssf.streaming.SXSSFWorkbook.createAndRegisterSXSSFSheet',0); INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('46003','导出文件时无法创建目录:%s','java.io.IOException: Mkdirs failed to create (\\S+) (.+)',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('46004','导入模块错误,系统没有%s模块,请联系运维人员安装','ImportError: No module named (\\S+)',0); +INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('46004','导入模块错误,系统没有%s模块,请联系运维人员安装','ImportError: No module named (S+)',0); INSERT INTO linkis_ps_error_code 
(error_code,error_desc,error_regex,error_type) VALUES ('46005','导出语句错误,请检查路径或命名','Illegal out script',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('46006','可能是并发访问同一个HDFS文件,导致Filesystem closed问题,尝试重试','java.io.IOException: Filesystem closed\\n\\s+(at org.apache.hadoop.hdfs.DFSClient.checkOpen)',0); --- 47 tuning --- INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('47001','诊断任务异常:%s,详细异常: %s','Tuning-Code: (\\S+), Tuning-Desc: (.+)',0); - -- 91 wtss INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('91001','找不到变量值,请确认您是否设置相关变量','not find variable substitution for',0); @@ -636,16 +584,6 @@ INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) VALUES (@data_source_type_id, 'envId', '集群环境(Cluster env)', 'Cluster env', NULL, 'SELECT', NULL, 1, '集群环境(Cluster env)', 'Cluster env', NULL, NULL, NULL, @data_source, now(), now()); -select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'mongodb'; -SET @data_source=CONCAT('/data-source-manager/env-list/all/type/',@data_source_type_id); -INSERT INTO `linkis_ps_dm_datasource_type_key` - (`data_source_type_id`, `key`, `name`, `name_en`, `default_value`, `value_type`, `scope`, `require`, `description`, `description_en`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`) -VALUES (@data_source_type_id, 'username', '用户名', NULL, 'TEXT', NULL, 1, '用户名', '^[0-9A-Za-z_-]+$', NULL, '', NULL, now(), now()), - (@data_source_type_id, 'password', '密码', NULL, 'PASSWORD', NULL, 1, '密码', '', NULL, '', NULL, now(), now()), - (@data_source_type_id, 'database', '默认库', NULL, 'TEXT', NULL, 1, '默认库', '^[0-9A-Za-z_-]+$', NULL, '', NULL, now(), now()), - 
(@data_source_type_id, 'host', 'Host', NULL, 'TEXT', NULL, 1, 'mongodb Host ', NULL, NULL, NULL, NULL, now(), now()), - (@data_source_type_id, 'port', '端口', NULL, 'TEXT', NULL, 1, '端口', NULL, NULL, NULL, NULL, now(), now()), - (@data_source_type_id, 'params', '连接参数', NULL, 'TEXT', NULL, 0, '输入JSON格式: {"param":"value"}', NULL, NULL, NULL, NULL, now(), now()); select @data_source_type_id := id from `linkis_ps_dm_datasource_type` where `name` = 'elasticsearch'; INSERT INTO `linkis_ps_dm_datasource_type_key` diff --git a/linkis-dist/package/db/upgrade/1.6.0_schema/mysql/linkis_ddl.sql b/linkis-dist/package/db/upgrade/1.6.0_schema/mysql/linkis_ddl.sql index fd8ead6289..562ee9ad4d 100644 --- a/linkis-dist/package/db/upgrade/1.6.0_schema/mysql/linkis_ddl.sql +++ b/linkis-dist/package/db/upgrade/1.6.0_schema/mysql/linkis_ddl.sql @@ -27,8 +27,7 @@ ALTER TABLE `linkis_cg_ec_resource_info_record` MODIFY COLUMN `metrics` text CHA ALTER TABLE `linkis_ps_configuration_config_key` CHANGE COLUMN `validate_range` `validate_range` VARCHAR(150) NULL DEFAULT NULL COMMENT 'Validate range' COLLATE 'utf8_bin' AFTER `validate_type`; ALTER TABLE linkis_cg_tenant_label_config ADD COLUMN is_valid varchar(1) CHARSET utf8mb4 COLLATE utf8mb4_bin DEFAULT 'Y' COMMENT '是否有效'; -ALTER TABLE linkis_ps_configuration_across_cluster_rule modify COLUMN rules varchar(512) CHARSET utf8mb4 COLLATE utf8mb4_bin; -ALTER TABLE linkis_cg_manager_label_value_relation ADD CONSTRAINT unlid_lvk_lvc UNIQUE (label_id,label_value_key,label_value_content); + -- ---------------------------- -- Table structure for linkis_org_user @@ -49,83 +48,5 @@ CREATE TABLE `linkis_org_user` ( PRIMARY KEY (`user_name`) ) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4 COLLATE=utf8mb4_bin COMMENT ='user org info'; -DROP TABLE IF EXISTS `linkis_ps_job_history_detail`; -DROP TABLE IF EXISTS `linkis_mg_gateway_whitelist_config`; - --- ---------------------------- --- Table structure for linkis_cg_tenant_department_config --- 
---------------------------- -DROP TABLE IF EXISTS `linkis_cg_tenant_department_config`; -CREATE TABLE `linkis_cg_tenant_department_config` ( - `id` int(20) NOT NULL AUTO_INCREMENT COMMENT 'ID', - `creator` varchar(50) COLLATE utf8_bin NOT NULL COMMENT '应用', - `department` varchar(64) COLLATE utf8_bin NOT NULL COMMENT '部门名称', - `department_id` varchar(16) COLLATE utf8_bin NOT NULL COMMENT '部门ID', - `tenant_value` varchar(128) COLLATE utf8_bin NOT NULL COMMENT '部门租户标签', - `create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间', - `update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间', - `create_by` varchar(50) COLLATE utf8_bin NOT NULL COMMENT '创建用户', - `is_valid` varchar(1) COLLATE utf8_bin NOT NULL DEFAULT 'Y' COMMENT '是否有效', - PRIMARY KEY (`id`), - UNIQUE KEY `uniq_creator_department` (`creator`,`department`) -) ENGINE=InnoDB AUTO_INCREMENT=0 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; - -DROP TABLE IF EXISTS `linkis_org_user_sync`; -CREATE TABLE `linkis_org_user_sync` ( - `cluster_code` varchar(16) COMMENT '集群', - `user_type` varchar(64) COMMENT '用户类型', - `user_name` varchar(128) COMMENT '授权用户', - `org_id` varchar(16) COMMENT '部门ID', - `org_name` varchar(64) COMMENT '部门名字', - `queue_name` varchar(64) COMMENT '默认资源队列', - `db_name` varchar(64) COMMENT '默认操作数据库', - `interface_user` varchar(64) COMMENT '接口人', - `is_union_analyse` varchar(64) COMMENT '是否联合分析人', - `create_time` varchar(64) COMMENT '用户创建时间', - `user_itsm_no` varchar(64) COMMENT '用户创建单号', - PRIMARY KEY (`user_name`) -) ENGINE = InnoDB DEFAULT CHARSET = utf8mb4 COLLATE=utf8mb4_bin COMMENT ='用户部门统计INC表'; - -DROP TABLE IF EXISTS `linkis_mg_gateway_whitelist_config`; -CREATE TABLE `linkis_mg_gateway_whitelist_config` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `allowed_user` varchar(128) COLLATE utf8_bin NOT NULL, - `client_address` varchar(128) COLLATE utf8_bin NOT NULL, - `create_time` datetime DEFAULT NULL, - `update_time` datetime DEFAULT NULL, - PRIMARY KEY (`id`), - 
UNIQUE KEY `address_uniq` (`allowed_user`, `client_address`), - KEY `linkis_mg_gateway_whitelist_config_allowed_user` (`allowed_user`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; - -DROP TABLE IF EXISTS `linkis_mg_gateway_whitelist_sensitive_user`; -CREATE TABLE `linkis_mg_gateway_whitelist_sensitive_user` ( - `id` int(11) NOT NULL AUTO_INCREMENT, - `sensitive_username` varchar(128) COLLATE utf8_bin NOT NULL, - `create_time` datetime DEFAULT NULL, - `update_time` datetime DEFAULT NULL, - PRIMARY KEY (`id`), - UNIQUE KEY `sensitive_username` (`sensitive_username`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; - -DROP TABLE IF EXISTS `linkis_ps_python_module_info`; -CREATE TABLE `linkis_ps_python_module_info` ( - `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '自增id', - `name` varchar(255) NOT NULL COMMENT 'python模块名称', - `description` text COMMENT 'python模块描述', - `path` varchar(255) NOT NULL COMMENT 'hdfs路径', - `engine_type` varchar(50) NOT NULL COMMENT '引擎类型,python/spark/all', - `create_user` varchar(50) NOT NULL COMMENT '创建用户', - `update_user` varchar(50) NOT NULL COMMENT '修改用户', - `is_load` tinyint(1) NOT NULL DEFAULT '0' COMMENT '是否加载,0-未加载,1-已加载', - `is_expire` tinyint(1) DEFAULT NULL COMMENT '是否过期,0-未过期,1-已过期)', - `create_time` datetime NOT NULL COMMENT '创建时间', - `update_time` datetime NOT NULL COMMENT '修改时间', - PRIMARY KEY (`id`) -) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin; COMMENT='Python模块包信息表'; - -ALTER TABLE linkis_cg_manager_service_instance ADD COLUMN params text COLLATE utf8_bin DEFAULT NULL; - - diff --git a/linkis-dist/package/db/upgrade/1.6.0_schema/mysql/linkis_dml.sql b/linkis-dist/package/db/upgrade/1.6.0_schema/mysql/linkis_dml.sql index c3d73821df..0c9b591a27 100644 --- a/linkis-dist/package/db/upgrade/1.6.0_schema/mysql/linkis_dml.sql +++ b/linkis-dist/package/db/upgrade/1.6.0_schema/mysql/linkis_dml.sql @@ -17,90 +17,3 @@ select @data_source_type_id := id from `linkis_ps_dm_datasource_type` 
where `name` = 'doris'; UPDATE linkis_ps_dm_datasource_type_key SET `require` = 0 WHERE `key` ="password" and `data_source_type_id` = @data_source_type_id; -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01006','没有健康可用的ecm节点,可能任务量大,导致节点资源处于不健康状态,尝试kill空闲引擎释放资源','There are corresponding ECM tenant labels',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('01007','文件编码格式异常,请联系管理人员处理','UnicodeEncodeError.*characters',0); -UPDATE linkis_ps_error_code SET error_regex = "KeyError: (.*)" WHERE error_code = "43017"; -UPDATE linkis_ps_error_code SET error_desc = "任务实际运行内存超过了设置的内存限制,请在管理台增加executor内存或在提交任务时通过spark.executor.memory增加内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13002"; -UPDATE linkis_ps_configuration_config_key SET validate_range ='[\",\",\"\\\\t\",\"\\\\;\",\"\\\\|\"]',description ="取值范围:,或\\t或;或|" WHERE `key`= "pipeline.field.split"; -DELETE FROM linkis_ps_error_code WHERE error_code = "43007"; -UPDATE linkis_ps_error_code SET error_regex='Permission denied:\\s*user=[a-zA-Z0-9_]+[,,]\\s*access=[a-zA-Z0-9_]+\\s*[,,]\\s*inode="([a-zA-Z0-9/_\\.]+)"' WHERE error_code = "22001"; -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13010','任务实际运行内存超过了设置的内存限制,请在管理台增加executor内存或在提交任务时通过spark.executor.memory增加内存','Container exited with a non-zero exit code',0); -UPDATE linkis_ps_configuration_config_key SET `key`="pipeline.output.isoverwrite" where `key` = "pipeline.output.isoverwtite"; -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43042','插入数据表动态分区数超过配置值 %s ,请优化sql或调整配置hive.exec.max.dynamic.partitions后重试','Maximum was set to (\\S+) partitions per node',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43043','执行任务消耗内存超过限制,hive任务请修改map或reduce的内存,spark任务请修改executor端内存','Error:java heap space',0); -INSERT INTO linkis_ps_error_code 
(error_code,error_desc,error_regex,error_type) VALUES ('43044','表 %s 分区数超过阈值 %s,需要分批删除分区,再删除表','the partitions of table (\\S+) exceeds threshold (\\S+)',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43045','查询/操作的表 %s 分区数为 %s ,超过阈值 %s ,需要限制查询/操作的分区数量','Number of partitions scanned \\(=(\\d+)\\) on table (\\S+) exceeds limit \\(=(\\d+)\\)',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43046','动态分区一次性写入分区数 %s ,超过阈值 %s,请减少一次性写入的分区数','Number of dynamic partitions created is (\\S+), which is more than (\\S+)',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43047','动态分区一次性写入分区数 %s ,超过阈值 %s,请减少一次性写入的分区数','Maximum was set to (\\S+) partitions per node, number of dynamic partitions on this node: (\\S+)',0); -INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`,`en_description`, `en_name`, `en_treeName`, `template_required`) VALUES ('mapreduce.job.reduce.slowstart.completedmaps', '取值范围:0-1', 'Map任务数与总Map任务数之间的比例','0.05', 'Regex', '^(0(\\.\\d{1,2})?|1(\\.0{1,2})?)$', '0', '0', '1', 'hive引擎设置', 'hive', 'Value Range: 0-1', 'The Ratio Between The Number Of Map Tasks And The Total Number Of Map Tasks', 'Hive Engine Settings', '1'); -insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) -(select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config -INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'hive' and config.`key` = "mapreduce.job.reduce.slowstart.completedmaps" and label_value = "*-*,hive-2.3.3"); -insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) -(select `relation`.`config_key_id` AS `config_key_id`, '' AS 
`config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation -INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id and relation.config_key_id = (select id FROM linkis_ps_configuration_config_key where `key`="mapreduce.job.reduce.slowstart.completedmaps")AND label.label_value = '*-*,hive-2.3.3'); -UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,请在管理台增加executor内存或在提交任务时通过spark.executor.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13002"; -UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,请在管理台增加executor内存或在提交任务时通过spark.executor.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13010"; -UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,请在管理台增加executor内存或调优sql后执行" WHERE error_code = "13003"; -UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致引擎意外退出,请在管理台增加executor内存或在提交任务时通过spark.executor.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13004"; -UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Spark app应用退出,请在管理台增加executor内存或在提交任务时通过spark.executor.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13005"; -UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Spark context应用退出,请在管理台增加executor内存或在提交任务时通过spark.executor.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13006"; -UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Pyspark子进程退出,请在管理台增加executor内存或在提交任务时通过spark.executor.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13007"; -UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Linkis服务负载过高,请在管理台调整executor内存或联系管理员扩容" WHERE error_code = "01002"; -UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Linkis服务负载过高,请在管理台调整executor内存或联系管理员扩容" WHERE error_code = "01003"; --- add starrocks -INSERT INTO `linkis_ps_dm_datasource_type` (`name`, `description`, `option`, `classifier`, `icon`, `layers`, `description_en`, `option_en`, `classifier_en`) 
VALUES ('starrocks', 'starrocks数据库', 'starrocks', 'olap', '', 4, 'StarRocks Database', 'StarRocks', 'Olap'); -INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `default_value`, `value_type`, `scope`, `require`, `description`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`, `name_en`, `description_en`) VALUES ((select id from `linkis_ps_dm_datasource_type` where `name` = 'starrocks'),'host','主机名(Host)',NULL,'TEXT',NULL,1,'主机名(Host)',NULL,NULL,NULL,NULL,now(),now(),'Host','Host'); -INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `default_value`, `value_type`, `scope`, `require`, `description`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`, `name_en`, `description_en`) VALUES ((select id from `linkis_ps_dm_datasource_type` where `name` = 'starrocks'),'port','TCP端口号(Port)','9030','TEXT',NULL,1,'TCP端口号',NULL,NULL,NULL,NULL,now(),now(),'Tcp_Port','Tcp_Port'); -INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `default_value`, `value_type`, `scope`, `require`, `description`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`, `name_en`, `description_en`) VALUES ((select id from `linkis_ps_dm_datasource_type` where `name` = 'starrocks'),'driverClassName','驱动类名(Driver class name)','com.mysql.jdbc.Driver','TEXT',NULL,1,'驱动类名(Driver class name)','',NULL,NULL,NULL,'2024-05-23 18:28:07.0','2024-05-23 18:28:07.0','Driver class name','Driver class name'); -INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `default_value`, `value_type`, `scope`, `require`, `description`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`, `name_en`, `description_en`) VALUES ((select id from `linkis_ps_dm_datasource_type` where `name` = 
'starrocks'),'username','用户名(Username)',NULL,'TEXT',NULL,1,'用户名(Username)','^[0-9A-Za-z_-]+$',NULL,NULL,NULL,now(),now(),'Username','Username'); -INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `default_value`, `value_type`, `scope`, `require`, `description`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`, `name_en`, `description_en`) VALUES ((select id from `linkis_ps_dm_datasource_type` where `name` = 'starrocks'),'password','密码(Password)',NULL,'PASSWORD',NULL,1,'密码(Password)','',NULL,NULL,NULL,now(),now(),'Password','Password'); -INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `default_value`, `value_type`, `scope`, `require`, `description`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`, `name_en`, `description_en`) VALUES ((select id from `linkis_ps_dm_datasource_type` where `name` = 'starrocks'),'databaseName','数据库名(Database name)',NULL,'TEXT',NULL,0,'数据库名(Database name)',NULL,NULL,NULL,NULL,now(),now(),'Database name','Database name'); -INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `default_value`, `value_type`, `scope`, `require`, `description`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`, `name_en`, `description_en`) VALUES ((select id from `linkis_ps_dm_datasource_type` where `name` = 'starrocks'),'params','连接参数(Connection params)',NULL,'TEXT',NULL,0,'输入JSON格式(Input JSON format): {"param":"value"}',NULL,NULL,NULL,NULL,now(),now(),'Connection params','Input JSON format: {"param":"value"}'); -INSERT INTO `linkis_ps_dm_datasource_type_key` (`data_source_type_id`, `key`, `name`, `default_value`, `value_type`, `scope`, `require`, `description`, `value_regex`, `ref_id`, `ref_value`, `data_source`, `update_time`, `create_time`, `name_en`, `description_en`) VALUES ((select id from `linkis_ps_dm_datasource_type` where `name` = 
'starrocks'),'http_port','HTTP端口号(Port)','8030','TEXT',NULL,0,'HTTP端口号',NULL,NULL,NULL,NULL,now(),now(),'Http_Port','Http_Port'); --- add userClientIP for tdsql -INSERT INTO linkis_ps_dm_datasource_type_key (data_source_type_id, `key`, name, default_value, value_type, `scope`, `require`, description, value_regex, ref_id, ref_value, data_source, update_time, create_time, name_en, description_en) VALUES(5, 'userClientIp', 'userClientIp', NULL, 'TEXT', 'ENV', 0, 'userClientIp', NULL, NULL, NULL, NULL, now(),now(), 'user client ip', 'user client ip'); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43019','执行表在元数据库中存在meta缓存,meta信息与缓存不一致导致,请增加参数(--conf spark.sql.hive.convertMetastoreOrc=false)后重试','Unable to alter table.*Table is not allowed to be altered',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13011','广播表过大导致driver内存溢出,请在执行sql前增加参数后重试:set spark.sql.autoBroadcastJoinThreshold=-1;','dataFrame to local exception',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43048','参数引用错误,请检查参数 %s 是否正常引用','UnboundLocalError.*local variable (\\S+) referenced before assignment',0); -UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Linkis服务负载过高,请在管理台调整Driver内存或联系管理员扩容" WHERE error_code = "01002"; -UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Linkis服务负载过高,请在管理台调整Driver内存或联系管理员扩容" WHERE error_code = "01003"; -UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Spark app应用退出,请在管理台增加Driver内存或在提交任务时通过spark.driver.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13005"; -UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Spark context应用退出,请在管理台增加Driver内存或在提交任务时通过spark.driver.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code = "13006"; -UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Pyspark子进程退出,请在管理台增加Driver内存或在提交任务时通过spark.driver.memory调整内存。更多细节请参考Linkis常见问题Q60" WHERE error_code 
= "13007"; -UPDATE linkis_ps_error_code SET error_desc = "您的任务因为引擎退出(退出可能是引擎进程OOM或者主动kill引擎)导致失败" WHERE error_code = "13009"; -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13012','driver内存不足,请增加driver内存后重试','Failed to allocate a page (\\S+.*\\)), try again.',0); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('13013','使用spark默认变量sc导致后续代码执行失败','sc.setJobGroup(\\S+.*\\))',0); -DELETE FROM linkis_ps_error_code WHERE error_code = "43016"; -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43016','模块 %s 没有属性 %s ,请确认代码引用是否正常','AttributeError: \'(\\S+)\' object has no attribute \'(\\S+)\'',0); -UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致引擎意外退出,请在管理台调整内存参数。" WHERE error_code = "13004"; -INSERT INTO linkis_cg_manager_label (label_key,label_value,label_feature,label_value_size,update_time,create_time) VALUES ('combined_userCreator_engineType','*-IDE,nebula-3.0.0','OPTIONAL',2,now(),now()); -INSERT INTO linkis_cg_manager_label (label_key,label_value,label_feature,label_value_size,update_time,create_time) VALUES ('combined_userCreator_engineType','*-*,nebula-3.0.0','OPTIONAL',2,now(),now()); -insert into linkis_ps_configuration_category (`label_id`, `level`) VALUES ((select id from linkis_cg_manager_label where `label_value` = '*-IDE,nebula-3.0.0'), 2); -INSERT INTO linkis_ps_configuration_config_key (`key`,description,name,default_value,validate_type,validate_range,engine_conn_type,is_hidden,is_advanced,`level`,treeName,boundary_type,en_treeName,en_description,en_name,template_required) VALUES -('linkis.nebula.host','Nebula 连接地址','Nebula 连接地址',NULL,'None',NULL,'nebula',0,0,1,'Necula引擎设置',0,'Nebula Engine Settings','Nebula Host','Nebula Host',0); -INSERT INTO linkis_ps_configuration_config_key 
(`key`,description,name,default_value,validate_type,validate_range,engine_conn_type,is_hidden,is_advanced,`level`,treeName,boundary_type,en_treeName,en_description,en_name,template_required) VALUES -('linkis.nebula.port','Nebula 连接端口','Nebula 连接端口',NULL,'None',NULL,'nebula',0,0,1,'Necula引擎设置',0,'Nebula Engine Settings','Nebula Port','Nebula Port',0); -INSERT INTO linkis_ps_configuration_config_key (`key`,description,name,default_value,validate_type,validate_range,engine_conn_type,is_hidden,is_advanced,`level`,treeName,boundary_type,en_treeName,en_description,en_name,template_required) VALUES -('linkis.nebula.username','Nebula 连接用户名','Nebula 连接用户名',NULL,'None',NULL,'nebula',0,0,1,'Necula引擎设置',0,'Nebula Engine Settings','Nebula Username','Nebula Username',0); -INSERT INTO linkis_ps_configuration_config_key (`key`,description,name,default_value,validate_type,validate_range,engine_conn_type,is_hidden,is_advanced,`level`,treeName,boundary_type,en_treeName,en_description,en_name,template_required) VALUES -('linkis.nebula.password','Nebula 连接密码','Nebula 连接密码',NULL,'None',NULL,'nebula',0,0,1,'Necula引擎设置',0,'Nebula Engine Settings','Nebula Password','Nebula Password',0); -insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) (select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'nebula' and config.`key` = 'linkis.nebula.host' and label_value = '*-*,nebula-3.0.0'); -insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) (select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'nebula' and config.`key` = 'linkis.nebula.port' and label_value = '*-*,nebula-3.0.0'); -insert into `linkis_ps_configuration_key_engine_relation` 
(`config_key_id`, `engine_type_label_id`) (select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'nebula' and config.`key` = 'linkis.nebula.username' and label_value = '*-*,nebula-3.0.0'); -insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) (select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'nebula' and config.`key` = 'linkis.nebula.password' and label_value = '*-*,nebula-3.0.0'); -insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) ( select `relation`.`config_key_id` AS `config_key_id`, '127.0.0.1' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id and relation.config_key_id = ( select id FROM linkis_ps_configuration_config_key where `key` = 'linkis.nebula.host') AND label.label_value = '*-*,nebula-3.0.0'); -insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) ( select `relation`.`config_key_id` AS `config_key_id`, '9669' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id and relation.config_key_id = ( select id FROM linkis_ps_configuration_config_key where `key` = 'linkis.nebula.port') AND label.label_value = '*-*,nebula-3.0.0'); -insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) ( select `relation`.`config_key_id` AS `config_key_id`, 'nebula' AS `config_value`, 
`relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id and relation.config_key_id = ( select id FROM linkis_ps_configuration_config_key where `key` = 'linkis.nebula.username') AND label.label_value = '*-*,nebula-3.0.0'); -insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) ( select `relation`.`config_key_id` AS `config_key_id`, 'nebula' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id and relation.config_key_id = ( select id FROM linkis_ps_configuration_config_key where `key` = 'linkis.nebula.password') AND label.label_value = '*-*,nebula-3.0.0'); -INSERT INTO linkis_ps_configuration_config_key (`key`, description, name, default_value, validate_type, validate_range, engine_conn_type, is_hidden, is_advanced, `level`, treeName, boundary_type, en_treeName, en_description, en_name, template_required) VALUES ('linkis.nebula.space', 'Nebula 图空间', 'Nebula 图空间', NULL, 'None', NULL, 'nebula', 0, 0, 1, 'Necula引擎设置', 0, 'Nebula Engine Settings', 'Nebula Space', 'Nebula Space', 0); -insert into `linkis_ps_configuration_key_engine_relation` (`config_key_id`, `engine_type_label_id`) ( select config.id as `config_key_id`, label.id AS `engine_type_label_id` FROM linkis_ps_configuration_config_key config INNER JOIN linkis_cg_manager_label label ON config.engine_conn_type = 'nebula' and config.`key` = 'linkis.nebula.space' and label_value = '*-*,nebula-3.0.0'); -insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`) ( select `relation`.`config_key_id` AS `config_key_id`, 'nebula' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM 
linkis_ps_configuration_key_engine_relation relation INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id and relation.config_key_id = ( select id FROM linkis_ps_configuration_config_key where `key` = 'linkis.nebula.space') AND label.label_value = '*-*,nebula-3.0.0'); -INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('43020','Python 进程已停止,查询失败!','python process has stopped',0); -UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,请在管理台增加executor内存或在提交任务时通过spark.executor.memory或spark.executor.memoryOverhead调整内存。" WHERE error_code = "13002"; -UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致引擎意外退出,请在管理台增加executor内存或在提交任务时通过spark.executor.memory或spark.executor.memoryOverhead调整内存" WHERE error_code = "13004"; -UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Pyspark子进程退出,请在管理台增加executor内存或在提交任务时通过spark.executor.memory或spark.executor.memoryOverhead调整内存" WHERE error_code = "13007"; -UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,请在管理台增加executor内存或在提交任务时通过spark.executor.memory或spark.executor.memoryOverhead调整内存" WHERE error_code = "13010"; -UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Spark app应用退出,请在管理台增加driver内存或在提交任务时通过spark.driver.memory调整内存" WHERE error_code = "13005"; -UPDATE linkis_ps_error_code SET error_desc = "任务运行内存超过设置内存限制,导致Spark context应用退出,请在管理台增加driver内存或在提交任务时通过spark.driver.memory调整内存" WHERE error_code = "13006"; -update linkis_ps_dm_datasource_type_key set name='Catalogs', description='Catalogs',name_en='Catalogs',description_en='Catalogs' where data_source_type_id in (select id from linkis_ps_dm_datasource_type where name = 'starrocks') and `key` = 'databaseName'; diff --git a/linkis-engineconn-plugins/elasticsearch/src/main/java/org/apache/linkis/engineplugin/elasticsearch/executor/ElasticSearchEngineConnExecutor.java 
b/linkis-engineconn-plugins/elasticsearch/src/main/java/org/apache/linkis/engineplugin/elasticsearch/executor/ElasticSearchEngineConnExecutor.java index fcb4a641c9..418c191b62 100644 --- a/linkis-engineconn-plugins/elasticsearch/src/main/java/org/apache/linkis/engineplugin/elasticsearch/executor/ElasticSearchEngineConnExecutor.java +++ b/linkis-engineconn-plugins/elasticsearch/src/main/java/org/apache/linkis/engineplugin/elasticsearch/executor/ElasticSearchEngineConnExecutor.java @@ -43,7 +43,7 @@ import org.apache.linkis.scheduler.executer.ErrorExecuteResponse; import org.apache.linkis.scheduler.executer.ExecuteResponse; import org.apache.linkis.storage.LineRecord; -import org.apache.linkis.storage.resultset.ResultSetFactory$; +import org.apache.linkis.storage.resultset.ResultSetFactory; import org.apache.linkis.storage.resultset.table.TableMetaData; import org.apache.commons.collections.MapUtils; @@ -134,7 +134,7 @@ public ExecuteResponse executeLine(EngineExecutionContext engineExecutorContext, (ElasticSearchTableResponse) elasticSearchResponse; TableMetaData metaData = new TableMetaData(tableResponse.columns()); ResultSetWriter resultSetWriter = - engineExecutorContext.createResultSetWriter(ResultSetFactory$.MODULE$.TABLE_TYPE()); + engineExecutorContext.createResultSetWriter(ResultSetFactory.TABLE_TYPE()); resultSetWriter.addMetaData(metaData); Arrays.asList(tableResponse.records()) .forEach( @@ -152,7 +152,7 @@ record -> { } else if (elasticSearchResponse instanceof ElasticSearchJsonResponse) { ElasticSearchJsonResponse jsonResponse = (ElasticSearchJsonResponse) elasticSearchResponse; ResultSetWriter resultSetWriter = - engineExecutorContext.createResultSetWriter(ResultSetFactory$.MODULE$.TABLE_TYPE()); + engineExecutorContext.createResultSetWriter(ResultSetFactory.TEXT_TYPE()); resultSetWriter.addMetaData(null); Arrays.stream(jsonResponse.value().split("\\n")) .forEach( diff --git 
a/linkis-engineconn-plugins/flink/flink-core/src/main/scala/org/apache/linkis/engineconnplugin/flink/launch/FlinkEngineConnLaunchBuilder.scala b/linkis-engineconn-plugins/flink/flink-core/src/main/scala/org/apache/linkis/engineconnplugin/flink/launch/FlinkEngineConnLaunchBuilder.scala index c582d40a81..ee15f882b8 100644 --- a/linkis-engineconn-plugins/flink/flink-core/src/main/scala/org/apache/linkis/engineconnplugin/flink/launch/FlinkEngineConnLaunchBuilder.scala +++ b/linkis-engineconn-plugins/flink/flink-core/src/main/scala/org/apache/linkis/engineconnplugin/flink/launch/FlinkEngineConnLaunchBuilder.scala @@ -108,6 +108,19 @@ class FlinkEngineConnLaunchBuilder extends JavaProcessEngineConnLaunchBuilder { environment } + override def getEnvironment(implicit + engineConnBuildRequest: EngineConnBuildRequest + ): util.Map[String, String] = { + val environment = new util.HashMap[String, String] + addPathToClassPath(environment, variable(PWD)) + val linkisEnvironment = super.getEnvironment + val linkisClassPath = linkisEnvironment.get(Environment.CLASSPATH.toString) + val v = environment.get(Environment.CLASSPATH.toString) + CLASS_PATH_SEPARATOR + linkisClassPath + environment.put(Environment.CLASSPATH.toString, v) + logger.info(environment.asScala.map(e => s"${e._1}->${e._2}").mkString(",")) + environment + } + private def contentToBmlResource(userName: String, content: String): BmlResource = { val contentMap = JsonUtils.jackson.readValue(content, classOf[util.Map[String, Object]]) contentToBmlResource(userName, contentMap) diff --git a/linkis-engineconn-plugins/openlookeng/src/main/java/org/apache/linkis/engineplugin/openlookeng/executor/OpenLooKengEngineConnExecutor.java b/linkis-engineconn-plugins/openlookeng/src/main/java/org/apache/linkis/engineplugin/openlookeng/executor/OpenLooKengEngineConnExecutor.java index def097b38b..db306b2bd5 100644 --- 
a/linkis-engineconn-plugins/openlookeng/src/main/java/org/apache/linkis/engineplugin/openlookeng/executor/OpenLooKengEngineConnExecutor.java +++ b/linkis-engineconn-plugins/openlookeng/src/main/java/org/apache/linkis/engineplugin/openlookeng/executor/OpenLooKengEngineConnExecutor.java @@ -45,7 +45,7 @@ import org.apache.linkis.scheduler.executer.ExecuteResponse; import org.apache.linkis.scheduler.executer.SuccessExecuteResponse; import org.apache.linkis.storage.domain.DataType; -import org.apache.linkis.storage.resultset.ResultSetFactory$; +import org.apache.linkis.storage.resultset.ResultSetFactory; import org.apache.linkis.storage.resultset.table.TableMetaData; import org.apache.linkis.storage.resultset.table.TableRecord; @@ -305,7 +305,7 @@ private void queryOutput( int columnCount = 0; int rows = 0; ResultSetWriter resultSetWriter = - engineExecutorContext.createResultSetWriter(ResultSetFactory$.MODULE$.TABLE_TYPE()); + engineExecutorContext.createResultSetWriter(ResultSetFactory.TABLE_TYPE()); try { QueryStatusInfo results = null; if (statement.isRunning()) { diff --git a/linkis-engineconn-plugins/presto/src/main/java/org/apache/linkis/engineplugin/presto/executor/PrestoEngineConnExecutor.java b/linkis-engineconn-plugins/presto/src/main/java/org/apache/linkis/engineplugin/presto/executor/PrestoEngineConnExecutor.java index 1bc16ee601..460de48305 100644 --- a/linkis-engineconn-plugins/presto/src/main/java/org/apache/linkis/engineplugin/presto/executor/PrestoEngineConnExecutor.java +++ b/linkis-engineconn-plugins/presto/src/main/java/org/apache/linkis/engineplugin/presto/executor/PrestoEngineConnExecutor.java @@ -48,7 +48,7 @@ import org.apache.linkis.scheduler.executer.SuccessExecuteResponse; import org.apache.linkis.storage.domain.Column; import org.apache.linkis.storage.domain.DataType; -import org.apache.linkis.storage.resultset.ResultSetFactory$; +import org.apache.linkis.storage.resultset.ResultSetFactory; import 
org.apache.linkis.storage.resultset.table.TableMetaData; import org.apache.linkis.storage.resultset.table.TableRecord; @@ -325,7 +325,7 @@ private void queryOutput( int columnCount = 0; int rows = 0; ResultSetWriter resultSetWriter = - engineExecutorContext.createResultSetWriter(ResultSetFactory$.MODULE$.TABLE_TYPE()); + engineExecutorContext.createResultSetWriter(ResultSetFactory.TABLE_TYPE()); try { QueryStatusInfo results = null; if (statement.isRunning()) { diff --git a/linkis-public-enhancements/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/BmlProjectDaoTest.java b/linkis-public-enhancements/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/BmlProjectDaoTest.java index d005260e75..8ccfc31ce5 100644 --- a/linkis-public-enhancements/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/BmlProjectDaoTest.java +++ b/linkis-public-enhancements/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/BmlProjectDaoTest.java @@ -106,11 +106,11 @@ void testGetProjectIdByName() { assertTrue(i != null); } -// @Test -// void testAttachResourceAndProject() { -// insertNewProject(); -// bmlProjectDao.attachResourceAndProject(1, "123"); -// } + @Test + void testAttachResourceAndProject() { + insertNewProject(); + bmlProjectDao.attachResourceAndProject(1, "123"); + } @Test void testCheckIfExists() { diff --git a/linkis-public-enhancements/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/ResourceDaoTest.java b/linkis-public-enhancements/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/ResourceDaoTest.java index 1bdcbfd35c..93bacdc018 100644 --- a/linkis-public-enhancements/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/ResourceDaoTest.java +++ b/linkis-public-enhancements/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/ResourceDaoTest.java @@ -34,8 +34,6 @@ class ResourceDaoTest extends BaseDaoTest { void insertResource() { Resource resource = new Resource(); resource.setResourceId("123"); - 
resource.setPrivate(false); - resource.setExpire(false); resource.setResourceHeader("2"); resource.setDownloadedFileName("testFileName"); resource.setSystem("testSystem"); diff --git a/linkis-public-enhancements/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/VersionDaoTest.java b/linkis-public-enhancements/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/VersionDaoTest.java index 5d5a82ab44..434d8961a9 100644 --- a/linkis-public-enhancements/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/VersionDaoTest.java +++ b/linkis-public-enhancements/linkis-bml-server/src/test/java/org/apache/linkis/bml/dao/VersionDaoTest.java @@ -30,189 +30,189 @@ class VersionDaoTest extends BaseDaoTest { - @Autowired VersionDao versionDao; - - private final String resourceId = "123"; - private final String version = "1.2"; - - void insertVersion() { - ResourceVersion resourceVersion = new ResourceVersion(); - resourceVersion.setResourceId("123"); - resourceVersion.setUser("binbin"); - resourceVersion.setSystem("testSys"); - resourceVersion.setFileMd5("binbinmd5"); - resourceVersion.setVersion("1.2"); - resourceVersion.setSize(25); - resourceVersion.setStartByte(35); - resourceVersion.setEndByte(36); - resourceVersion.setResource("testreso"); - resourceVersion.setDescription("testDesc"); - resourceVersion.setStartTime(new Date()); - resourceVersion.setEndTime(new Date()); - resourceVersion.setClientIp("132.145.36"); - resourceVersion.setUpdator("testUp"); - resourceVersion.setEnableFlag(true); - versionDao.insertNewVersion(resourceVersion); - } - - @Test - void testGetVersion() { - insertVersion(); - versionDao.getVersion(resourceId, version); - } - - @Test - void testGetVersions() { - insertVersion(); - versionDao.getVersions(resourceId); - } - - @Test - void testGetResourcesVersions() { - insertVersion(); - Map map = new HashMap<>(); - map.put("system", "testSys"); - map.put("user", "binbin"); - List list = new ArrayList<>(); - list.add("123"); - 
list.add("321"); - map.put("resourceIds", list); - versionDao.getResourcesVersions(map); - } - - @Test - void testDeleteVersion() { - insertVersion(); - versionDao.deleteVersion(resourceId, version); - } - - @Test - void testDeleteVersions() { - insertVersion(); - versionDao.deleteVersions(resourceId); - } - - @Test - void testBathDeleteVersions() { - insertVersion(); - List resourceIdlist = new ArrayList<>(); - resourceIdlist.add(resourceId); - resourceIdlist.add("21"); - List versionlist = new ArrayList<>(); - versionlist.add(version); - versionlist.add("2.1"); - versionDao.bathDeleteVersions(resourceIdlist, versionlist); - } - - @Test - void testInsertNewVersion() { - ResourceVersion resourceVersion = new ResourceVersion(); - resourceVersion.setResourceId(resourceId); - resourceVersion.setUser("binbin"); - resourceVersion.setSystem("testSys"); - resourceVersion.setFileMd5("binbinmd5"); - resourceVersion.setVersion(version); - resourceVersion.setSize(25); - resourceVersion.setStartByte(35); - resourceVersion.setEndByte(36); - resourceVersion.setResource("testreso"); - resourceVersion.setDescription("testDesc"); - resourceVersion.setStartTime(new Date()); - resourceVersion.setEndTime(new Date()); - resourceVersion.setClientIp("132.145.36"); - resourceVersion.setUpdator("testUp"); - resourceVersion.setEnableFlag(true); - versionDao.insertNewVersion(resourceVersion); - } - - @Test - void testGetResourcePath() { - insertVersion(); - versionDao.getResourcePath(resourceId); - } - - @Test - void testGetNewestVersion() { - insertVersion(); - versionDao.getNewestVersion(resourceId); - } - - @Test - void testGetStartByteForResource() { - insertVersion(); - versionDao.getStartByteForResource(resourceId, version); - } - -// @Test -// void testGetEndByte() { -// insertVersion(); -// versionDao.getEndByte(resourceId, version); -// } - - @Test - void testFindResourceVersion() { - insertVersion(); - versionDao.findResourceVersion(resourceId, version); - } - - @Test - void 
testGetAllResourcesViaSystem() { - insertVersion(); - versionDao.getAllResourcesViaSystem(resourceId, version); - } - - @Test - void testSelectResourcesViaSystemByPage() { - insertVersion(); - versionDao.selectResourcesViaSystemByPage(resourceId, version); - } - - @Test - void testCheckVersion() { - insertVersion(); - versionDao.checkVersion(resourceId, version); - } - -// @Test -// void testSelectResourceVersionEnbleFlag() { -// insertVersion(); -// versionDao.selectResourceVersionEnbleFlag(resourceId, version); -// } - - @Test - void testDeleteResource() { - insertVersion(); - versionDao.deleteResource(resourceId); - } - - @Test - void testBatchDeleteResources() { - insertVersion(); - List resourceIdlist = new ArrayList<>(); - resourceIdlist.add(resourceId); - resourceIdlist.add("21"); - List versionlist = new ArrayList<>(); - versionlist.add(version); - versionlist.add("2.1"); - versionDao.bathDeleteVersions(resourceIdlist, versionlist); - } - - @Test - void testGetResourceVersion() { - versionDao.getResourceVersion(resourceId, version); - } - - @Test - void testSelectVersionByPage() { - insertVersion(); - List list = versionDao.selectVersionByPage(resourceId); - assertTrue(list.size() >= 1); - } - - @Test - void testGetResourceVersionsByResourceId() { - insertVersion(); - List list = versionDao.getResourceVersionsByResourceId(resourceId); - assertTrue(list.size() >= 1); - } + @Autowired VersionDao versionDao; + + private final String resourceId = "123"; + private final String version = "1.2"; + + void insertVersion() { + ResourceVersion resourceVersion = new ResourceVersion(); + resourceVersion.setResourceId(resourceId); + resourceVersion.setUser("binbin"); + resourceVersion.setSystem("testSys"); + resourceVersion.setFileMd5("binbinmd5"); + resourceVersion.setVersion(version); + resourceVersion.setSize(25); + resourceVersion.setStartByte(35); + resourceVersion.setEndByte(36); + resourceVersion.setResource("testreso"); + 
resourceVersion.setDescription("testDesc"); + resourceVersion.setStartTime(new Date()); + resourceVersion.setEndTime(new Date()); + resourceVersion.setClientIp("132.145.36"); + resourceVersion.setUpdator("testUp"); + resourceVersion.setEnableFlag(true); + versionDao.insertNewVersion(resourceVersion); + } + + @Test + void testGetVersion() { + insertVersion(); + versionDao.getVersion(resourceId, version); + } + + @Test + void testGetVersions() { + insertVersion(); + versionDao.getVersions(resourceId); + } + + @Test + void testGetResourcesVersions() { + insertVersion(); + Map map = new HashMap<>(); + map.put("system", "testSys"); + map.put("user", "binbin"); + List list = new ArrayList<>(); + list.add("123"); + list.add("321"); + map.put("resourceIds", list); + versionDao.getResourcesVersions(map); + } + + @Test + void testDeleteVersion() { + insertVersion(); + versionDao.deleteVersion(resourceId, version); + } + + @Test + void testDeleteVersions() { + insertVersion(); + versionDao.deleteVersions(resourceId); + } + + @Test + void testBathDeleteVersions() { + insertVersion(); + List resourceIdlist = new ArrayList<>(); + resourceIdlist.add(resourceId); + resourceIdlist.add("21"); + List versionlist = new ArrayList<>(); + versionlist.add(version); + versionlist.add("2.1"); + versionDao.bathDeleteVersions(resourceIdlist, versionlist); + } + + @Test + void testInsertNewVersion() { + ResourceVersion resourceVersion = new ResourceVersion(); + resourceVersion.setResourceId(resourceId); + resourceVersion.setUser("binbin"); + resourceVersion.setSystem("testSys"); + resourceVersion.setFileMd5("binbinmd5"); + resourceVersion.setVersion(version); + resourceVersion.setSize(25); + resourceVersion.setStartByte(35); + resourceVersion.setEndByte(36); + resourceVersion.setResource("testreso"); + resourceVersion.setDescription("testDesc"); + resourceVersion.setStartTime(new Date()); + resourceVersion.setEndTime(new Date()); + resourceVersion.setClientIp("132.145.36"); + 
resourceVersion.setUpdator("testUp"); + resourceVersion.setEnableFlag(true); + versionDao.insertNewVersion(resourceVersion); + } + + @Test + void testGetResourcePath() { + insertVersion(); + versionDao.getResourcePath(resourceId); + } + + @Test + void testGetNewestVersion() { + insertVersion(); + versionDao.getNewestVersion(resourceId); + } + + @Test + void testGetStartByteForResource() { + insertVersion(); + versionDao.getStartByteForResource(resourceId, version); + } + + @Test + void testGetEndByte() { + insertVersion(); + versionDao.getEndByte(resourceId, version); + } + + @Test + void testFindResourceVersion() { + insertVersion(); + versionDao.findResourceVersion(resourceId, version); + } + + @Test + void testGetAllResourcesViaSystem() { + insertVersion(); + versionDao.getAllResourcesViaSystem(resourceId, version); + } + + @Test + void testSelectResourcesViaSystemByPage() { + insertVersion(); + versionDao.selectResourcesViaSystemByPage(resourceId, version); + } + + @Test + void testCheckVersion() { + insertVersion(); + versionDao.checkVersion(resourceId, version); + } + + @Test + void testSelectResourceVersionEnbleFlag() { + insertVersion(); + versionDao.selectResourceVersionEnbleFlag(resourceId, version); + } + + @Test + void testDeleteResource() { + insertVersion(); + versionDao.deleteResource(resourceId); + } + + @Test + void testBatchDeleteResources() { + insertVersion(); + List resourceIdlist = new ArrayList<>(); + resourceIdlist.add(resourceId); + resourceIdlist.add("21"); + List versionlist = new ArrayList<>(); + versionlist.add(version); + versionlist.add("2.1"); + versionDao.bathDeleteVersions(resourceIdlist, versionlist); + } + + @Test + void testGetResourceVersion() { + versionDao.getResourceVersion(resourceId, version); + } + + @Test + void testSelectVersionByPage() { + insertVersion(); + List list = versionDao.selectVersionByPage(resourceId); + assertTrue(list.size() >= 1); + } + + @Test + void testGetResourceVersionsByResourceId() { + 
insertVersion(); + List list = versionDao.getResourceVersionsByResourceId(resourceId); + assertTrue(list.size() >= 1); + } } diff --git a/linkis-public-enhancements/linkis-bml-server/src/test/resources/application.properties b/linkis-public-enhancements/linkis-bml-server/src/test/resources/application.properties index bd6cb1a4a6..fee53af985 100644 --- a/linkis-public-enhancements/linkis-bml-server/src/test/resources/application.properties +++ b/linkis-public-enhancements/linkis-bml-server/src/test/resources/application.properties @@ -18,14 +18,9 @@ #h2 database config spring.datasource.driver-class-name=org.h2.Driver #init -spring.datasource.url=jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true +spring.datasource.url=jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true;INIT=runscript from 'classpath:create.sql' spring.datasource.username=sa spring.datasource.password= -spring.sql.init.schema-locations=classpath:create.sql -springfox.documentation.enabled=false -springfox.documentation.auto-startup=false -springfox.documentation.swagger-ui.enabled=false - spring.datasource.hikari.connection-test-query=select 1 spring.datasource.hikari.minimum-idle=5 spring.datasource.hikari.auto-commit=true @@ -44,4 +39,4 @@ mybatis-plus.configuration.log-impl=org.apache.ibatis.logging.stdout.StdOutImpl #disable eureka discovery client spring.cloud.service-registry.auto-registration.enabled=false eureka.client.enabled=false -eureka.client.serviceUrl.registerWithEureka=false +eureka.client.serviceUrl.registerWithEureka=false \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-bml-server/src/test/resources/create.sql b/linkis-public-enhancements/linkis-bml-server/src/test/resources/create.sql index f4c9696b16..4ad89a52ef 100644 --- a/linkis-public-enhancements/linkis-bml-server/src/test/resources/create.sql +++ b/linkis-public-enhancements/linkis-bml-server/src/test/resources/create.sql @@ -18,139 +18,117 @@ SET 
FOREIGN_KEY_CHECKS = 0; SET REFERENTIAL_INTEGRITY FALSE; - DROP TABLE IF EXISTS linkis_ps_bml_project_resource; -CREATE TABLE linkis_ps_bml_project_resource ( - id int(10) NOT NULL AUTO_INCREMENT, - project_id int(10) NOT NULL, - resource_id varchar(128), - PRIMARY KEY (id) +CREATE TABLE linkis_ps_bml_project_resource ( + id int(10) NOT NULL AUTO_INCREMENT, + project_id int(10) NOT NULL, + resource_id varchar(128), + PRIMARY KEY ( id ) ); + + DROP TABLE IF EXISTS linkis_ps_bml_project; CREATE TABLE linkis_ps_bml_project ( - id int(10) NOT NULL AUTO_INCREMENT, - name varchar(128) DEFAULT NULL, - system varchar(64) NOT NULL DEFAULT 'dss', - source varchar(1024) DEFAULT NULL, - description varchar(1024) DEFAULT NULL, - creator varchar(128) NOT NULL, - enabled tinyint(4) DEFAULT '1', - create_time datetime DEFAULT CURRENT_TIMESTAMP, - PRIMARY KEY (id), - UNIQUE KEY name (name) + id int(10) NOT NULL AUTO_INCREMENT, + name varchar(128) DEFAULT NULL, + system varchar(64) NOT NULL DEFAULT 'dss', + source varchar(1024) DEFAULT NULL, + description varchar(1024) DEFAULT NULL, + creator varchar(128) NOT NULL, + enabled tinyint(4) DEFAULT '1', + create_time datetime DEFAULT CURRENT_TIMESTAMP, + PRIMARY KEY ( id ), + UNIQUE KEY name ( name ) ); + + DROP TABLE IF EXISTS linkis_ps_bml_project_user; -CREATE TABLE linkis_ps_bml_project_user ( - id int(10) NOT NULL AUTO_INCREMENT, - project_id int(10) NOT NULL, - username varchar(64) DEFAULT NULL, - priv int(10) NOT NULL DEFAULT '7', - creator varchar(128) NOT NULL, - create_time datetime DEFAULT CURRENT_TIMESTAMP, - expire_time datetime DEFAULT NULL, - PRIMARY KEY (id), - UNIQUE KEY user_project (username, project_id) +CREATE TABLE linkis_ps_bml_project_user ( + id int(10) NOT NULL AUTO_INCREMENT, + project_id int(10) NOT NULL, + username varchar(64) DEFAULT NULL, + priv int(10) NOT NULL DEFAULT '7', + creator varchar(128) NOT NULL, + create_time datetime DEFAULT CURRENT_TIMESTAMP, + expire_time datetime DEFAULT NULL, + PRIMARY KEY 
( id ), + UNIQUE KEY user_project ( username , project_id ) ); + + + DROP TABLE IF EXISTS linkis_ps_bml_resources_version; -CREATE TABLE linkis_ps_bml_resources_version ( - id bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'Primary key', - resource_id varchar(50) NOT NULL COMMENT 'Resource uuid', - file_md5 varchar(32) NOT NULL COMMENT 'Md5 summary of the file', - version varchar(20) NOT NULL COMMENT 'Resource version (v plus five digits)', - size int(10) NOT NULL COMMENT 'File size', - start_byte bigint(20) unsigned NOT NULL DEFAULT '0', - end_byte bigint(20) unsigned NOT NULL DEFAULT '0', - resource varchar(2000) NOT NULL COMMENT 'Resource content (file information including path and file name)', - description varchar(2000) DEFAULT NULL COMMENT 'description', - start_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Started time', - end_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Stoped time', - client_ip varchar(200) NOT NULL COMMENT 'Client ip', - updator varchar(50) DEFAULT NULL COMMENT 'updator', - enable_flag tinyint(1) NOT NULL DEFAULT '1' COMMENT 'Status, 1: normal, 0: frozen', - PRIMARY KEY (id), - UNIQUE KEY resource_id_version (resource_id, version) +CREATE TABLE linkis_ps_bml_resources_version ( + id bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'Primary key', + resource_id varchar(50) NOT NULL COMMENT 'Resource uuid', + file_md5 varchar(32) NOT NULL COMMENT 'Md5 summary of the file', + version varchar(20) NOT NULL COMMENT 'Resource version (v plus five digits)', + size int(10) NOT NULL COMMENT 'File size', + start_byte bigint(20) unsigned NOT NULL DEFAULT '0', + end_byte bigint(20) unsigned NOT NULL DEFAULT '0', + resource varchar(2000) NOT NULL COMMENT 'Resource content (file information including path and file name)', + description varchar(2000) DEFAULT NULL COMMENT 'description', + start_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Started time', + end_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Stoped 
time', + client_ip varchar(200) NOT NULL COMMENT 'Client ip', + updator varchar(50) DEFAULT NULL COMMENT 'updator', + enable_flag tinyint(1) NOT NULL DEFAULT '1' COMMENT 'Status, 1: normal, 0: frozen', + PRIMARY KEY ( id ), + UNIQUE KEY resource_id_version ( resource_id , version ) ); + DROP TABLE IF EXISTS linkis_ps_bml_resources_task; -CREATE TABLE linkis_ps_bml_resources_task ( - id bigint(20) NOT NULL AUTO_INCREMENT, - resource_id varchar(50) DEFAULT NULL COMMENT 'resource uuid', - version varchar(20) DEFAULT NULL COMMENT 'Resource version number of the current operation', - operation varchar(20) NOT NULL COMMENT 'Operation type. upload = 0, update = 1', - state varchar(20) NOT NULL DEFAULT 'Schduled' COMMENT 'Current status of the task:Schduled, Running, Succeed, Failed,Cancelled', - submit_user varchar(20) NOT NULL DEFAULT '' COMMENT 'Job submission user name', - system varchar(20) DEFAULT 'dss' COMMENT 'Subsystem name: wtss', - instance varchar(128) NOT NULL COMMENT 'Material library example', - client_ip varchar(50) DEFAULT NULL COMMENT 'Request IP', - extra_params text COMMENT 'Additional key information. Such as the resource IDs and versions that are deleted in batches, and all versions under the resource are deleted', - err_msg varchar(2000) DEFAULT NULL COMMENT 'Task failure information.e.getMessage', - start_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Starting time', - end_time datetime DEFAULT NULL COMMENT 'End Time', - last_update_time datetime NOT NULL COMMENT 'Last update time', - PRIMARY KEY (id) +CREATE TABLE linkis_ps_bml_resources_task ( + id bigint(20) NOT NULL AUTO_INCREMENT, + resource_id varchar(50) DEFAULT NULL COMMENT 'resource uuid', + version varchar(20) DEFAULT NULL COMMENT 'Resource version number of the current operation', + operation varchar(20) NOT NULL COMMENT 'Operation type. 
upload = 0, update = 1', + state varchar(20) NOT NULL DEFAULT 'Schduled' COMMENT 'Current status of the task:Schduled, Running, Succeed, Failed,Cancelled', + submit_user varchar(20) NOT NULL DEFAULT '' COMMENT 'Job submission user name', + system varchar(20) DEFAULT 'dss' COMMENT 'Subsystem name: wtss', + instance varchar(128) NOT NULL COMMENT 'Material library example', + client_ip varchar(50) DEFAULT NULL COMMENT 'Request IP', + extra_params text COMMENT 'Additional key information. Such as the resource IDs and versions that are deleted in batches, and all versions under the resource are deleted', + err_msg varchar(2000) DEFAULT NULL COMMENT 'Task failure information.e.getMessage', + start_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Starting time', + end_time datetime DEFAULT NULL COMMENT 'End Time', + last_update_time datetime NOT NULL COMMENT 'Last update time', + PRIMARY KEY ( id ) ); + DROP TABLE IF EXISTS linkis_ps_bml_resources; -CREATE TABLE linkis_ps_bml_resources ( - id bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'Primary key', - resource_id varchar(50) NOT NULL COMMENT 'resource uuid', - is_private tinyint(1) DEFAULT '0' COMMENT 'Whether the resource is private, 0 means private, 1 means public', - resource_header tinyint(1) DEFAULT '0' COMMENT 'Classification, 0 means unclassified, 1 means classified', - downloaded_file_name varchar(200) DEFAULT NULL COMMENT 'File name when downloading', - sys varchar(100) NOT NULL COMMENT 'Owning system', - create_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Created time', - owner varchar(200) NOT NULL COMMENT 'Resource owner', - is_expire tinyint(1) DEFAULT '0' COMMENT 'Whether expired, 0 means not expired, 1 means expired', - expire_type varchar(50) DEFAULT NULL COMMENT 'Expiration type, date refers to the expiration on the specified date, TIME refers to the time', - expire_time varchar(50) DEFAULT NULL COMMENT 'Expiration time, one day by default', - max_version int(20) DEFAULT '10' COMMENT 
'The default is 10, which means to keep the latest 10 versions', - update_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Updated time', - updator varchar(50) DEFAULT NULL COMMENT 'updator', - enable_flag tinyint(1) NOT NULL DEFAULT '1' COMMENT 'Status, 1: normal, 0: frozen', - PRIMARY KEY (id) +CREATE TABLE linkis_ps_bml_resources ( + id bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'Primary key', + resource_id varchar(50) NOT NULL COMMENT 'resource uuid', + is_private tinyint(1) DEFAULT '0' COMMENT 'Whether the resource is private, 0 means private, 1 means public', + resource_header tinyint(1) DEFAULT '0' COMMENT 'Classification, 0 means unclassified, 1 means classified', + downloaded_file_name varchar(200) DEFAULT NULL COMMENT 'File name when downloading', + sys varchar(100) NOT NULL COMMENT 'Owning system', + create_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Created time', + owner varchar(200) NOT NULL COMMENT 'Resource owner', + is_expire tinyint(1) DEFAULT '0' COMMENT 'Whether expired, 0 means not expired, 1 means expired', + expire_type varchar(50) DEFAULT NULL COMMENT 'Expiration type, date refers to the expiration on the specified date, TIME refers to the time', + expire_time varchar(50) DEFAULT NULL COMMENT 'Expiration time, one day by default', + max_version int(20) DEFAULT '10' COMMENT 'The default is 10, which means to keep the latest 10 versions', + update_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Updated time', + updator varchar(50) DEFAULT NULL COMMENT 'updator', + enable_flag tinyint(1) NOT NULL DEFAULT '1' COMMENT 'Status, 1: normal, 0: frozen', + PRIMARY KEY ( id ) ); + DROP TABLE IF EXISTS linkis_ps_resources_download_history; -CREATE TABLE linkis_ps_resources_download_history ( - id bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'primary key', - start_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'start time', - end_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'stop time', - client_ip 
varchar(200) NOT NULL COMMENT 'client ip', - state tinyint(1) NOT NULL COMMENT 'Download status, 0 download successful, 1 download failed', - resource_id varchar(50) NOT NULL, - version varchar(20) NOT NULL, - downloader varchar(50) NOT NULL COMMENT 'Downloader', - PRIMARY KEY (id) -); -DROP TABLE IF EXISTS linkis_ps_bml_resources_version; -CREATE TABLE linkis_ps_bml_resources_version ( - id bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'Primary key', - resource_id varchar(50) NOT NULL COMMENT 'Resource uuid', - file_md5 varchar(32) NOT NULL COMMENT 'Md5 summary of the file', - version varchar(20) NOT NULL COMMENT 'Resource version (v plus five digits)', - size int(10) NOT NULL COMMENT 'File size', - start_byte BIGINT(20) UNSIGNED NOT NULL DEFAULT 0, - end_byte BIGINT(20) UNSIGNED NOT NULL DEFAULT 0, - resource varchar(2000) NOT NULL COMMENT 'Resource content (file information including path and file name)', - description varchar(2000) DEFAULT NULL COMMENT 'description', - start_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Started time', - end_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'Stoped time', - client_ip varchar(200) NOT NULL COMMENT 'Client ip', - updator varchar(50) DEFAULT NULL COMMENT 'updator', - enable_flag tinyint(1) NOT NULL DEFAULT '1' COMMENT 'Status, 1: normal, 0: frozen', - unique key uniq_rid_version(resource_id, version), - PRIMARY KEY (id) -); -DROP TABLE IF EXISTS linkis_ps_bml_project; -create table linkis_ps_bml_project( - id int(10) NOT NULL AUTO_INCREMENT, - name varchar(128) DEFAULT NULL, - system varchar(64) not null default NULL, - source varchar(1024) default null, - description varchar(1024) default null, - creator varchar(128) not null, - enabled tinyint default 1, - create_time datetime DEFAULT now(), - unique key uniq_name (name), - PRIMARY KEY (id) +CREATE TABLE linkis_ps_resources_download_history ( + id bigint(20) NOT NULL AUTO_INCREMENT COMMENT 'primary key', + start_time datetime NOT NULL DEFAULT 
CURRENT_TIMESTAMP COMMENT 'start time', + end_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT 'stop time', + client_ip varchar(200) NOT NULL COMMENT 'client ip', + state tinyint(1) NOT NULL COMMENT 'Download status, 0 download successful, 1 download failed', + resource_id varchar(50) NOT NULL, + version varchar(20) NOT NULL, + downloader varchar(50) NOT NULL COMMENT 'Downloader', + PRIMARY KEY ( id ) ); + insert ignore into linkis_ps_bml_project_user(project_id, username, priv, creator, create_time) values ( 1, 'creCreatorUser', 2, 'creatorTest', now()); -insert ignore into linkis_ps_bml_project(name, system, source, description, creator, enabled, create_time)values('testName', 'testSy','test', 'descTest','creCreatorUser', 1, now()); +insert ignore into linkis_ps_bml_project(name, `system`, source, description, creator, enabled, create_time)values('testName', 'testSy','test', 'descTest','creCreatorUser', 1, now()); insert ignore into linkis_ps_bml_project_resource(project_id, resource_id) values(1, '123'); \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-configuration/src/test/resources/application.properties b/linkis-public-enhancements/linkis-configuration/src/test/resources/application.properties index 3bf91ee768..acc1c1b034 100644 --- a/linkis-public-enhancements/linkis-configuration/src/test/resources/application.properties +++ b/linkis-public-enhancements/linkis-configuration/src/test/resources/application.properties @@ -34,16 +34,22 @@ wds.linkis.gateway.conf.publicservice.list=query,jobhistory,application,configur #logging.file=./test.log #debug=true -#h2 database config spring.datasource.driver-class-name=org.h2.Driver -#init spring.datasource.url=jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true +spring.datasource.schema=classpath:create.sql +spring.datasource.data=classpath:data.sql spring.datasource.username=sa spring.datasource.password= -spring.sql.init.schema-locations=classpath:create.sql 
-springfox.documentation.enabled=false -springfox.documentation.auto-startup=false -springfox.documentation.swagger-ui.enabled=false +spring.datasource.hikari.connection-test-query=select 1 +spring.datasource.hikari.minimum-idle=5 +spring.datasource.hikari.auto-commit=true +spring.datasource.hikari.validation-timeout=3000 +spring.datasource.hikari.pool-name=linkis-test +spring.datasource.hikari.maximum-pool-size=50 +spring.datasource.hikari.connection-timeout=30000 +spring.datasource.hikari.idle-timeout=600000 +spring.datasource.hikari.leak-detection-threshold=0 +spring.datasource.hikari.initialization-fail-timeout=1 spring.main.web-application-type=servlet server.port=1234 diff --git a/linkis-public-enhancements/linkis-cs-server/src/test/java/org/apache/linkis/cs/server/conf/ContextServerConfTest.java b/linkis-public-enhancements/linkis-cs-server/src/test/java/org/apache/linkis/cs/server/conf/ContextServerConfTest.java index 5397089555..4c5fcb97a8 100644 --- a/linkis-public-enhancements/linkis-cs-server/src/test/java/org/apache/linkis/cs/server/conf/ContextServerConfTest.java +++ b/linkis-public-enhancements/linkis-cs-server/src/test/java/org/apache/linkis/cs/server/conf/ContextServerConfTest.java @@ -37,5 +37,6 @@ public void constTest() { Assertions.assertTrue(100 == csSchedulerMaxRunningJobs); Assertions.assertTrue(1000 == csSchedulerMaxAskExecutorTimes); Assertions.assertTrue(10000 == csSchedulerJobWaitMills); + Assertions.assertTrue("cs_1_dev" == confLabel); } } diff --git a/linkis-public-enhancements/linkis-cs-server/src/test/resources/application.properties b/linkis-public-enhancements/linkis-cs-server/src/test/resources/application.properties index 037eca4fb9..b9ed613e62 100644 --- a/linkis-public-enhancements/linkis-cs-server/src/test/resources/application.properties +++ b/linkis-public-enhancements/linkis-cs-server/src/test/resources/application.properties @@ -30,16 +30,20 @@ 
wds.linkis.gateway.conf.publicservice.list=query,jobhistory,application,configur #logging.file=./test.log #debug=true -#h2 database config spring.datasource.driver-class-name=org.h2.Driver -#init -spring.datasource.url=jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true +spring.datasource.url=jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true;INIT=runscript from 'classpath:create.sql' spring.datasource.username=sa spring.datasource.password= -spring.sql.init.schema-locations=classpath:create.sql -springfox.documentation.enabled=false -springfox.documentation.auto-startup=false -springfox.documentation.swagger-ui.enabled=false +spring.datasource.hikari.connection-test-query=select 1 +spring.datasource.hikari.minimum-idle=5 +spring.datasource.hikari.auto-commit=true +spring.datasource.hikari.validation-timeout=3000 +spring.datasource.hikari.pool-name=linkis-test +spring.datasource.hikari.maximum-pool-size=50 +spring.datasource.hikari.connection-timeout=30000 +spring.datasource.hikari.idle-timeout=600000 +spring.datasource.hikari.leak-detection-threshold=0 +spring.datasource.hikari.initialization-fail-timeout=1 spring.main.web-application-type=servlet server.port=1234 diff --git a/linkis-public-enhancements/linkis-cs-server/src/test/resources/application.yml b/linkis-public-enhancements/linkis-cs-server/src/test/resources/application.yml index 2507d1883c..e1290d6d79 100644 --- a/linkis-public-enhancements/linkis-cs-server/src/test/resources/application.yml +++ b/linkis-public-enhancements/linkis-cs-server/src/test/resources/application.yml @@ -16,15 +16,6 @@ server: port: 9010 spring: - cloud: - loadbalancer: - cache: - enabled: false - main: - allow-circular-references: true - mvc: - pathmatch: - matching-strategy: ant_path_matcher application: name: linkis-ps-cs @@ -42,7 +33,3 @@ management: web: exposure: include: refresh,info - -knife4j: - enable: false - production: false \ No newline at end of file diff --git 
a/linkis-public-enhancements/linkis-cs-server/src/test/resources/create.sql b/linkis-public-enhancements/linkis-cs-server/src/test/resources/create.sql index f6bf76e496..33956b3fb5 100644 --- a/linkis-public-enhancements/linkis-cs-server/src/test/resources/create.sql +++ b/linkis-public-enhancements/linkis-cs-server/src/test/resources/create.sql @@ -47,7 +47,7 @@ CREATE TABLE linkis_ps_cs_context_listener ( DROP TABLE IF EXISTS linkis_ps_cs_context_id CASCADE; CREATE TABLE linkis_ps_cs_context_id ( id int(11) AUTO_INCREMENT, - `user` varchar(32) DEFAULT NULL, + user varchar(32) DEFAULT NULL, application varchar(32) DEFAULT NULL, source varchar(255) DEFAULT NULL, expire_type varchar(32) DEFAULT NULL, @@ -69,20 +69,4 @@ CREATE TABLE linkis_ps_cs_context_map_listener ( create_time datetime DEFAULT CURRENT_TIMESTAMP, access_time datetime DEFAULT CURRENT_TIMESTAMP, PRIMARY KEY (id) -) ; - -DROP TABLE IF EXISTS linkis_ps_cs_context_map CASCADE; -CREATE TABLE linkis_ps_cs_context_map ( - id int(11) AUTO_INCREMENT, - `key` varchar(128) DEFAULT NULL, - context_scope varchar(32) DEFAULT NULL, - context_type varchar(32) DEFAULT NULL, - props varchar(255), - `value` varchar(255), - context_id int(11) DEFAULT NULL, - keywords varchar(255) DEFAULT NULL, - update_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, - create_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, - access_time datetime NOT NULL DEFAULT CURRENT_TIMESTAMP, - PRIMARY KEY (id) ) ; \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/test/resources/application.properties b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/test/resources/application.properties index e95a393ae3..2adc4e001b 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/test/resources/application.properties +++ 
b/linkis-public-enhancements/linkis-datasource/linkis-datasource-manager/server/src/test/resources/application.properties @@ -18,13 +18,15 @@ #h2 database config spring.datasource.driver-class-name=org.h2.Driver #init -spring.datasource.url=jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true +spring.datasource.url=jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true; +#spring.datasource.url=jdbc:h2:mem:testPgDb;MODE=PostgreSQL;IGNORECASE=TRUE;DATABASE_TO_LOWER=TRUE; spring.datasource.username=sa spring.datasource.password= spring.sql.init.schema-locations=classpath:create.sql springfox.documentation.enabled=false springfox.documentation.auto-startup=false springfox.documentation.swagger-ui.enabled=false +#spring.datasource.schema=classpath:create_pg.sql mybatis-plus.mapper-locations=classpath*:mapper/common/*.xml,classpath*:mapper/mysql/*.xml diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/resources/application.properties b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/resources/application.properties index e33693d392..6071e0acce 100644 --- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/resources/application.properties +++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/test/resources/application.properties @@ -30,9 +30,7 @@ wds.linkis.gateway.conf.publicservice.list=query,jobhistory,application,configur #logging.file=./test.log #debug=true -#h2 database config spring.datasource.driver-class-name=org.h2.Driver -#init spring.datasource.url=jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true spring.datasource.username=sa spring.datasource.password= diff --git a/linkis-public-enhancements/linkis-instance-label-server/src/test/resources/application.properties b/linkis-public-enhancements/linkis-instance-label-server/src/test/resources/application.properties index eec1dcc65a..b8b65f39b6 100644 --- 
a/linkis-public-enhancements/linkis-instance-label-server/src/test/resources/application.properties +++ b/linkis-public-enhancements/linkis-instance-label-server/src/test/resources/application.properties @@ -18,7 +18,8 @@ #h2 database config spring.datasource.driver-class-name=org.h2.Driver #init -spring.datasource.url=jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true;INIT=runscript from 'classpath:create.sql' +spring.datasource.url=jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true +#spring.datasource.url=jdbc:h2:mem:testPgDb;MODE=PostgreSQL;IGNORECASE=TRUE;DATABASE_TO_LOWER=TRUE; spring.datasource.username=sa spring.datasource.password= spring.datasource.schema=classpath:create.sql @@ -32,8 +33,4 @@ mybatis-plus.configuration.log-impl=org.apache.ibatis.logging.stdout.StdOutImpl #disable eureka discovery client spring.cloud.service-registry.auto-registration.enabled=false eureka.client.enabled=false -eureka.client.serviceUrl.registerWithEureka=false -springfox.documentation.enabled=false -springfox.documentation.auto-startup=false -springfox.documentation.swagger-ui.enabled=false - +eureka.client.serviceUrl.registerWithEureka=false \ No newline at end of file diff --git a/linkis-public-enhancements/linkis-jobhistory/src/test/resources/application.properties b/linkis-public-enhancements/linkis-jobhistory/src/test/resources/application.properties index e93b2bd4e5..88aba01282 100644 --- a/linkis-public-enhancements/linkis-jobhistory/src/test/resources/application.properties +++ b/linkis-public-enhancements/linkis-jobhistory/src/test/resources/application.properties @@ -30,16 +30,20 @@ wds.linkis.gateway.conf.publicservice.list=query,jobhistory,application,configur #logging.file=./test.log #debug=true -#h2 database config spring.datasource.driver-class-name=org.h2.Driver -#init -spring.datasource.url=jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true 
+spring.datasource.url=jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true;INIT=runscript from 'classpath:create.sql' spring.datasource.username=sa spring.datasource.password= -spring.sql.init.schema-locations=classpath:create.sql -springfox.documentation.enabled=false -springfox.documentation.auto-startup=false -springfox.documentation.swagger-ui.enabled=false +spring.datasource.hikari.connection-test-query=select 1 +spring.datasource.hikari.minimum-idle=5 +spring.datasource.hikari.auto-commit=true +spring.datasource.hikari.validation-timeout=3000 +spring.datasource.hikari.pool-name=linkis-test +spring.datasource.hikari.maximum-pool-size=50 +spring.datasource.hikari.connection-timeout=30000 +spring.datasource.hikari.idle-timeout=600000 +spring.datasource.hikari.leak-detection-threshold=0 +spring.datasource.hikari.initialization-fail-timeout=1 spring.main.web-application-type=servlet server.port=1234 diff --git a/linkis-public-enhancements/linkis-jobhistory/src/test/resources/create.sql b/linkis-public-enhancements/linkis-jobhistory/src/test/resources/create.sql index ec9ccb4a99..8d7bd16fa1 100644 --- a/linkis-public-enhancements/linkis-jobhistory/src/test/resources/create.sql +++ b/linkis-public-enhancements/linkis-jobhistory/src/test/resources/create.sql @@ -61,4 +61,4 @@ CREATE TABLE linkis_ps_job_history_group_history ( ) ; INSERT INTO linkis_ps_job_history_group_history (job_req_id,submit_user,execute_user,source,labels,params,progress,status,log_path,error_code,error_desc,created_time,updated_time,instances,metrics,engine_type,execution_code,result_location) VALUES - 
('LINKISCLI_hadoop_spark_0','hadoop','hadoop','{"scriptPath":"LinkisCli","requestIP":"127.0.0.1"}','{"userCreator":"hadoop-LINKISCLI","engineType":"spark-3.0.1","codeType":"sql","executeOnce":""}','{"configuration":{"startup":{},"runtime":{"hive.resultset.use.unique.column.names":true,"wds.linkis.resultSet.store.path":"hdfs:///tmp/linkis/hadoop/linkis/20220714_185840/LINKISCLI/1","source":{"scriptPath":"LinkisCli","requestIP":"127.0.0.1"},"job":{"resultsetIndex":0,"#rt_rs_store_path":"hdfs:///tmp/linkis/hadoop/linkis/20220714_185840/LINKISCLI/1"}}},"variable":{}}','1.0','Succeed','hdfs:///tmp/linkis/log/2022-07-14/LINKISCLI/hadoop/1.log',0,'',now(),now(),'127.0.0.1:9104','{"scheduleTime":"2022-07-14T18:58:40+0800","timeToOrchestrator":"2022-07-14T18:58:41+0800","submitTime":"2022-07-14T18:58:39+0800","yarnResource":{"application_1657595967414_0003":{"queueMemory":1073741824,"queueCores":1,"queueInstances":0,"jobStatus":"RUNNING","queue":"default"}},"completeTime":"2022-07-14T18:59:51+0800"}','spark','show databases;','hdfs:///tmp/linkis/hadoop/linkis/20220714_185840/LINKISCLI/1'); + ('LINKISCLI_hadoop_spark_0','hadoop','hadoop','{"scriptPath":"LinkisCli","requestIP":"127.0.0.1"}','{"userCreator":"hadoop-LINKISCLI","engineType":"spark-3.0.1","codeType":"sql","executeOnce":""}','{"configuration":{"startup":{},"runtime":{"hive.resultset.use.unique.column.names":true,"wds.linkis.resultSet.store.path":"hdfs:///tmp/linkis/hadoop/linkis/20220714_185840/LINKISCLI/1","source":{"scriptPath":"LinkisCli","requestIP":"127.0.0.1"},"job":{"resultsetIndex":0,"#rt_rs_store_path":"hdfs:///tmp/linkis/hadoop/linkis/20220714_185840/LINKISCLI/1"}}},"variable":{}}','1.0','Succeed','hdfs:///tmp/linkis/log/2022-07-14/LINKISCLI/hadoop/1.log',0,'','2022-07-14 18:58:39.019000000','2022-07-14 
18:59:51.589000000','127.0.0.1:9104','{"scheduleTime":"2022-07-14T18:58:40+0800","timeToOrchestrator":"2022-07-14T18:58:41+0800","submitTime":"2022-07-14T18:58:39+0800","yarnResource":{"application_1657595967414_0003":{"queueMemory":1073741824,"queueCores":1,"queueInstances":0,"jobStatus":"RUNNING","queue":"default"}},"completeTime":"2022-07-14T18:59:51+0800"}','spark','show databases;','hdfs:///tmp/linkis/hadoop/linkis/20220714_185840/LINKISCLI/1'); diff --git a/linkis-public-enhancements/linkis-ps-common-lock/src/test/resources/application.properties b/linkis-public-enhancements/linkis-ps-common-lock/src/test/resources/application.properties index 8f29efb0fe..2f7d2ea8b4 100644 --- a/linkis-public-enhancements/linkis-ps-common-lock/src/test/resources/application.properties +++ b/linkis-public-enhancements/linkis-ps-common-lock/src/test/resources/application.properties @@ -30,16 +30,22 @@ wds.linkis.gateway.conf.publicservice.list=query,jobhistory,application,configur #logging.file=./test.log #debug=true -#h2 database config spring.datasource.driver-class-name=org.h2.Driver -#init spring.datasource.url=jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true +spring.datasource.schema=classpath:create.sql +spring.datasource.data=classpath:data.sql spring.datasource.username=sa spring.datasource.password= -spring.sql.init.schema-locations=classpath:create.sql -springfox.documentation.enabled=false -springfox.documentation.auto-startup=false -springfox.documentation.swagger-ui.enabled=false +spring.datasource.hikari.connection-test-query=select 1 +spring.datasource.hikari.minimum-idle=5 +spring.datasource.hikari.auto-commit=true +spring.datasource.hikari.validation-timeout=3000 +spring.datasource.hikari.pool-name=linkis-test +spring.datasource.hikari.maximum-pool-size=50 +spring.datasource.hikari.connection-timeout=30000 +spring.datasource.hikari.idle-timeout=600000 +spring.datasource.hikari.leak-detection-threshold=0 
+spring.datasource.hikari.initialization-fail-timeout=1 spring.main.web-application-type=servlet server.port=1234 diff --git a/linkis-public-enhancements/linkis-udf-service/src/test/resources/application.properties b/linkis-public-enhancements/linkis-udf-service/src/test/resources/application.properties index 6d0bdf1163..075dafbfb1 100644 --- a/linkis-public-enhancements/linkis-udf-service/src/test/resources/application.properties +++ b/linkis-public-enhancements/linkis-udf-service/src/test/resources/application.properties @@ -30,16 +30,22 @@ wds.linkis.login_encrypt.enable=false #logging.file=./test.log #debug=true -#h2 database config -spring.datasource.driver-class-name=org.h2.Driver -#init +spring.datasource.driver-class-name=org.h2.Driver spring.datasource.url=jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true +spring.datasource.schema=classpath:create.sql +spring.datasource.data=classpath:data.sql spring.datasource.username=sa spring.datasource.password= -spring.sql.init.schema-locations=classpath:create.sql -springfox.documentation.enabled=false -springfox.documentation.auto-startup=false -springfox.documentation.swagger-ui.enabled=false +spring.datasource.hikari.connection-test-query=select 1 +spring.datasource.hikari.minimum-idle=5 +spring.datasource.hikari.auto-commit=true +spring.datasource.hikari.validation-timeout=3000 +spring.datasource.hikari.pool-name=linkis-test +spring.datasource.hikari.maximum-pool-size=50 +spring.datasource.hikari.connection-timeout=30000 +spring.datasource.hikari.idle-timeout=600000 +spring.datasource.hikari.leak-detection-threshold=0 +spring.datasource.hikari.initialization-fail-timeout=1 spring.main.web-application-type=servlet server.port=1234 diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/resources/application.properties b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/resources/application.properties index
a78f26d901..82b0cc6314 100644 --- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/resources/application.properties +++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-authentication/src/test/resources/application.properties @@ -30,20 +30,17 @@ wds.linkis.login_encrypt.enable=false #logging.file=./test.log #debug=true -#h2 database config spring.datasource.driver-class-name=org.h2.Driver -#init spring.datasource.url=jdbc:h2:mem:test;MODE=MySQL;DB_CLOSE_DELAY=-1;DATABASE_TO_LOWER=true -spring.datasource.username=sa -spring.datasource.password= spring.sql.init.schema-locations=classpath:create.sql springfox.documentation.enabled=false springfox.documentation.auto-startup=false springfox.documentation.swagger-ui.enabled=false - #pgtest #spring.datasource.url=jdbc:h2:mem:testPgDb;MODE=PostgreSQL;IGNORECASE=TRUE;DATABASE_TO_LOWER=TRUE; #spring.datasource.schema=classpath:create_pg.sql +spring.datasource.username=sa +spring.datasource.password= spring.main.web-application-type=servlet server.port=1234 diff --git a/tool/dependencies/known-dependencies.txt b/tool/dependencies/known-dependencies.txt index 1f2e289554..b8c73b6c88 100644 --- a/tool/dependencies/known-dependencies.txt +++ b/tool/dependencies/known-dependencies.txt @@ -32,7 +32,6 @@ asm-analysis-9.3.jar asm-commons-9.3.jar asm-tree-9.3.jar aspectjweaver-1.9.7.jar -attoparser-2.0.5.RELEASE.jar audience-annotations-0.13.0.jar audience-annotations-0.5.0.jar automaton-1.11-8.jar @@ -698,10 +697,6 @@ spring-beans-5.3.27.jar spring-boot-2.7.11.jar spring-boot-actuator-2.7.11.jar spring-boot-actuator-autoconfigure-2.7.11.jar -spring-boot-admin-server-2.7.16.jar -spring-boot-admin-server-cloud-2.7.16.jar -spring-boot-admin-server-ui-2.7.16.jar -spring-boot-admin-starter-server-2.7.16.jar spring-boot-autoconfigure-2.7.11.jar spring-boot-starter-2.7.11.jar spring-boot-starter-actuator-2.7.11.jar @@ -714,7 +709,6 @@ spring-boot-starter-json-2.7.11.jar 
spring-boot-starter-log4j2-2.7.11.jar spring-boot-starter-quartz-2.7.11.jar spring-boot-starter-reactor-netty-2.7.11.jar -spring-boot-starter-thymeleaf-2.7.11.jar spring-boot-starter-validation-2.7.11.jar spring-boot-starter-web-2.7.11.jar spring-boot-starter-webflux-2.7.11.jar @@ -772,9 +766,6 @@ swagger-models-2.1.2.jar tephra-api-0.6.0.jar tephra-core-0.6.0.jar tephra-hbase-compat-1.0-0.6.0.jar -thymeleaf-3.0.15.RELEASE.jar -thymeleaf-extras-java8time-3.0.4.RELEASE.jar -thymeleaf-spring5-3.0.15.RELEASE.jar token-provider-1.0.1.jar tomcat-embed-el-9.0.74.jar transaction-api-1.1.jar @@ -786,7 +777,6 @@ twill-discovery-api-0.6.0-incubating.jar twill-discovery-core-0.6.0-incubating.jar twill-zookeeper-0.6.0-incubating.jar txw2-2.3.8.jar -unbescape-1.1.6.RELEASE.jar units-1.3.jar units-1.6.jar validation-api-2.0.1.Final.jar