diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/pom.xml b/linkis-public-enhancements/linkis-datasource/linkis-metadata/pom.xml
index 3f683a33f5..3e42b8f138 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/pom.xml
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/pom.xml
@@ -95,6 +95,12 @@
provided
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-jdbc</artifactId>
+      <version>${hive.version}</version>
+    </dependency>
+
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/restful/api/DataSourceRestfulApi.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/restful/api/DataSourceRestfulApi.java
index d1d0fec0f5..24dbf2fb4f 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/restful/api/DataSourceRestfulApi.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/restful/api/DataSourceRestfulApi.java
@@ -21,6 +21,7 @@
import org.apache.linkis.metadata.restful.remote.DataSourceRestfulRemote;
import org.apache.linkis.metadata.service.DataSourceService;
import org.apache.linkis.metadata.service.HiveMetaWithPermissionService;
+import org.apache.linkis.metadata.util.HiveService2Utils;
import org.apache.linkis.server.Message;
import org.apache.linkis.server.utils.ModuleUserUtils;
@@ -62,7 +63,12 @@ public class DataSourceRestfulApi implements DataSourceRestfulRemote {
public Message queryDatabaseInfo(HttpServletRequest req) {
String userName = ModuleUserUtils.getOperationUser(req, "get dbs");
try {
- JsonNode dbs = dataSourceService.getDbs(userName);
+ JsonNode dbs;
+ if (HiveService2Utils.checkHiveServer2Enable()) {
+ dbs = HiveService2Utils.getDbs(userName);
+ } else {
+ dbs = dataSourceService.getDbs(userName);
+ }
return Message.ok("").data("dbs", dbs);
} catch (Exception e) {
logger.error("Failed to get database(获取数据库失败)", e);
@@ -137,7 +143,12 @@ public Message queryTables(
String userName = ModuleUserUtils.getOperationUser(req, "get tables");
MetadataQueryParam queryParam = MetadataQueryParam.of(userName).withDbName(database);
try {
- JsonNode tables = dataSourceService.queryTables(queryParam);
+ JsonNode tables;
+ if (HiveService2Utils.checkHiveServer2Enable()) {
+ tables = HiveService2Utils.getTables(userName, database);
+ } else {
+ tables = dataSourceService.queryTables(queryParam);
+ }
return Message.ok("").data("tables", tables);
} catch (Exception e) {
logger.error("Failed to queryTables", e);
@@ -160,8 +171,13 @@ public Message queryTableMeta(
MetadataQueryParam queryParam =
MetadataQueryParam.of(userName).withDbName(database).withTableName(table);
try {
- JsonNode columns =
- hiveMetaWithPermissionService.getColumnsByDbTableNameAndOptionalUserName(queryParam);
+ JsonNode columns;
+ if (HiveService2Utils.checkHiveServer2Enable()) {
+ columns = HiveService2Utils.getColumns(userName, database, table);
+ } else {
+ columns =
+ hiveMetaWithPermissionService.getColumnsByDbTableNameAndOptionalUserName(queryParam);
+ }
return Message.ok("").data("columns", columns);
} catch (Exception e) {
logger.error("Failed to get data table structure(获取数据表结构失败)", e);
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/DWSConfig.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/DWSConfig.java
index d44cb0a830..379c09449a 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/DWSConfig.java
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/DWSConfig.java
@@ -49,4 +49,10 @@ public class DWSConfig {
"wds.linkis.hdfs.rest.errs",
".*Filesystem closed.*|.*Failed to find any Kerberos tgt.*")
.getValue();
+
+  public static CommonVars<String> HIVE_SERVER2_URL =
+      CommonVars$.MODULE$.apply("linkis.hive.server2.address", "jdbc:hive2://127.0.0.1:10000/");
+
+  public static CommonVars<Boolean> HIVE_SERVER2_ENABLE =
+      CommonVars$.MODULE$.apply("linkis.hive.server2.enable", false);
}
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java
new file mode 100644
index 0000000000..0d8e43b99c
--- /dev/null
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/src/main/java/org/apache/linkis/metadata/util/HiveService2Utils.java
@@ -0,0 +1,232 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.metadata.util;
+
+import org.apache.commons.lang.StringUtils;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.node.ArrayNode;
+import com.fasterxml.jackson.databind.node.ObjectNode;
+
+/**
+ * Helpers for querying Hive metadata (databases, tables, columns) directly over a
+ * HiveServer2 JDBC connection, used as an alternative to the metastore-backed services
+ * when {@code linkis.hive.server2.enable} is set.
+ */
+public class HiveService2Utils {
+
+  private static final String driverName = "org.apache.hive.jdbc.HiveDriver";
+
+  // Database used for the initial "show databases" connection.
+  private static final String defaultDb = "default";
+  // NOTE(review): hard-coded password sent to HiveServer2 for every user — confirm the
+  // target cluster ignores passwords (e.g. NOSASL/NONE auth); otherwise this must come
+  // from configuration, never from source code.
+  private static final String defaultPassword = "123456";
+
+  static ObjectMapper jsonMapper = new ObjectMapper();
+  // NOTE(review): SimpleDateFormat is not thread-safe; this shared static instance can
+  // produce corrupted results under concurrent requests — consider DateTimeFormatter
+  // or a per-call instance. Pattern matches HiveServer2's "CreateTime:" output.
+  static SimpleDateFormat sdf = new SimpleDateFormat("EEE MMM dd HH:mm:ss zzz yyyy", Locale.US);
+
+ /** Determine whether to start hiveServer2 Query the left menu bar */
+ public static Boolean checkHiveServer2Enable() {
+ return DWSConfig.HIVE_SERVER2_ENABLE.getValue();
+ }
+
+  /**
+   * Opens a HiveServer2 JDBC connection for the given user and database.
+   * The caller is responsible for closing the returned connection.
+   */
+  private static Connection getConn(String username, String db) throws Exception {
+    // Explicit driver registration; harmless under JDBC 4 auto-loading.
+    Class.forName(driverName);
+    String base = DWSConfig.HIVE_SERVER2_URL.getValue();
+    // Normalize the configured base URL so exactly one '/' precedes the db name.
+    StringBuilder url = new StringBuilder(base);
+    if (!base.endsWith("/")) {
+      url.append('/');
+    }
+    url.append(db);
+    return DriverManager.getConnection(url.toString(), username, defaultPassword);
+  }
+
+  /**
+   * Lists all databases visible to the given user via HiveServer2.
+   *
+   * @param username user to connect as
+   * @return array of objects of the form {"dbName": "<name>"}
+   * @throws Exception on driver loading, connection or query failure
+   */
+  public static JsonNode getDbs(String username) throws Exception {
+    ArrayNode dbsNode = jsonMapper.createArrayNode();
+    // Fix: was raw "List" — raw type breaks the typed for-each below.
+    List<String> dbs = new ArrayList<>();
+    Connection conn = null;
+    Statement stat = null;
+    ResultSet rs = null;
+    try {
+      conn = getConn(username, defaultDb);
+      stat = conn.createStatement();
+      rs = stat.executeQuery("show databases");
+      while (rs.next()) {
+        dbs.add(rs.getString(1));
+      }
+    } finally {
+      // Release JDBC resources even when the query fails.
+      close(conn, stat, rs);
+    }
+    for (String db : dbs) {
+      ObjectNode dbNode = jsonMapper.createObjectNode();
+      dbNode.put("dbName", db);
+      dbsNode.add(dbNode);
+    }
+    return dbsNode;
+  }
+
+  /**
+   * Gets all tables of the specified database, with owner and creation time
+   * extracted from "DESCRIBE FORMATTED" output.
+   *
+   * @param username user to connect as
+   * @param dbName database whose tables are listed
+   * @return array of objects with databaseName/tableName/createdBy/createdAt/
+   *     lastAccessAt/isView fields
+   * @throws Exception on connection, query or date-parsing failure
+   */
+  public static JsonNode getTables(String username, String dbName) throws Exception {
+    ArrayNode tablesNode = jsonMapper.createArrayNode();
+    Connection conn = null;
+    Statement stat = null;
+    ResultSet rs = null;
+    try {
+      // Fix: was raw "List" — raw type breaks the typed for-each below.
+      List<String> tableNames = new ArrayList<>();
+      conn = getConn(username, dbName);
+      stat = conn.createStatement();
+      rs = stat.executeQuery("show tables");
+      while (rs.next()) {
+        tableNames.add(rs.getString(1));
+      }
+
+      // Get detailed information about each table. Table names come from
+      // "show tables" above, so concatenating them into the query is safe here.
+      for (String tableName : tableNames) {
+        ObjectNode tableNode = jsonMapper.createObjectNode();
+        ResultSet describeRs = stat.executeQuery("DESCRIBE FORMATTED " + tableName);
+        try {
+          while (describeRs.next()) {
+            String columnName = describeRs.getString(1);
+            String dataType = describeRs.getString(2);
+            if (null != columnName) {
+              columnName = columnName.trim();
+            }
+            if (null != dataType) {
+              dataType = dataType.trim();
+            }
+            // Fix: DESCRIBE FORMATTED emits rows with a null first column
+            // (section separators); the original called contains() on it -> NPE.
+            if (null == columnName) {
+              continue;
+            }
+            if (columnName.contains("Owner:")) {
+              tableNode.put("createdBy", dataType);
+            }
+            if (columnName.contains("CreateTime:")) {
+              // Seconds since epoch; CreateTime appears after Owner, so stop here.
+              long createdAt = sdf.parse(dataType).getTime() / 1000;
+              tableNode.put("createdAt", createdAt);
+              break;
+            }
+          }
+        } finally {
+          // Fix: close the per-table result set even when parsing throws
+          // (the original leaked it on any exception inside the loop).
+          describeRs.close();
+        }
+        tableNode.put("databaseName", dbName);
+        tableNode.put("tableName", tableName);
+        tableNode.put("lastAccessAt", 0);
+        tableNode.put("isView", false);
+        tablesNode.add(tableNode);
+      }
+    } finally {
+      close(conn, stat, rs);
+    }
+
+    return tablesNode;
+  }
+
+ /** Gets information about all fields of a specified table */
+ public static JsonNode getColumns(String username, String dbName, String tbName)
+ throws Exception {
+ ArrayNode columnsNode = jsonMapper.createArrayNode();
+ List