From e9776e3bf1459504afa30dd6204d5e1ec9aac25a Mon Sep 17 00:00:00 2001
From: peacewong
Date: Wed, 25 May 2022 11:53:03 +0800
Subject: [PATCH 1/3] Add instructions for keeping other upper-level components compatible with Linkis 1.1.1

---
 docs/deployment/quick_deploy.md                    | 6 ++++++
 .../current/deployment/quick_deploy.md             | 5 +++++
 2 files changed, 11 insertions(+)

diff --git a/docs/deployment/quick_deploy.md b/docs/deployment/quick_deploy.md
index c59aee52fc2..7fdcadaadaa 100644
--- a/docs/deployment/quick_deploy.md
+++ b/docs/deployment/quick_deploy.md
@@ -243,6 +243,12 @@ cp mysql-connector-java-5.1.49.jar {LINKIS_HOME}/lib/linkis-commons/public-modul
 
 ### 5. Linkis quick startup
 
+**Notice**: if you use DSS or other projects that depend on a Linkis version < 1.1.1, you also need to append the following property to the linkis.properties file:
+```shell
+echo "wds.linkis.session.ticket.key=bdp-user-ticket-id" >> linkis.properties
+```
+
+
 (1). Start services
 
 Run the following commands in the installation directory to start all services.
diff --git a/i18n/zh-CN/docusaurus-plugin-content-docs/current/deployment/quick_deploy.md b/i18n/zh-CN/docusaurus-plugin-content-docs/current/deployment/quick_deploy.md
index 1ccc4d1de36..697ee3dc217 100644
--- a/i18n/zh-CN/docusaurus-plugin-content-docs/current/deployment/quick_deploy.md
+++ b/i18n/zh-CN/docusaurus-plugin-content-docs/current/deployment/quick_deploy.md
@@ -259,6 +259,11 @@ cp mysql-connector-java-5.1.49.jar {LINKIS_HOME}/lib/linkis-commons/public-modu
 
 ### 4.5 Linkis quick startup
 
+**Note**: if the DSS or other project you use depends on a Linkis version < 1.1.1, you also need to append the following property to the linkis.properties file:
+```shell
+echo "wds.linkis.session.ticket.key=bdp-user-ticket-id" >> linkis.properties
+```
+
 #### 4.5.1 Start the services
 
 Run the following command in the installation directory to start all services:
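[Editor's note] The notice added in PATCH 1/3 appends to `linkis.properties` relative to the current directory. A minimal end-to-end sketch of applying the compatibility setting is shown below; it assumes the property file lives at `$LINKIS_HOME/conf/linkis.properties` and that the standard `linkis-stop-all.sh` / `linkis-start-all.sh` scripts exist under `sbin/`. Both assumptions should be checked against your installation.

```shell
# Sketch only: the paths and script names below are assumptions, not part of the patch.
cd $LINKIS_HOME
# Append the session ticket key expected by DSS and other clients built against Linkis < 1.1.1
echo "wds.linkis.session.ticket.key=bdp-user-ticket-id" >> conf/linkis.properties
# Restart all services so the new property takes effect
sh sbin/linkis-stop-all.sh
sh sbin/linkis-start-all.sh
```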
From dc582541d860aec8d397a64b84cbfefab9a740af Mon Sep 17 00:00:00 2001
From: peacewong
Date: Wed, 25 May 2022 12:06:36 +0800
Subject: [PATCH 2/3] Optimize the SDK manual

---
 docs/user_guide/sdk_manual.md                      | 12 +++++-----
 .../current/user_guide/sdk_manual.md               | 23 ++++++++++---------
 2 files changed, 18 insertions(+), 17 deletions(-)

diff --git a/docs/user_guide/sdk_manual.md b/docs/user_guide/sdk_manual.md
index 509d5dc6e55..30f36b433cd 100644
--- a/docs/user_guide/sdk_manual.md
+++ b/docs/user_guide/sdk_manual.md
@@ -117,7 +117,7 @@ public class LinkisClientTest {
         // set label map: EngineTypeLabel/UserCreatorLabel/EngineRunTypeLabel/Tenant
         Map labels = new HashMap();
         labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, "spark-2.4.3"); // required engineType label
-        labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-IDE"); // required: execution user and creator
+        labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-APPName"); // required: execution user and creator
         labels.put(LabelKeyConstant.CODE_TYPE_KEY, "py"); // required codeType
         // set startup map: engineConn start params
         Map startupMap = new HashMap(16);
@@ -155,7 +155,7 @@ public class LinkisClientTest {
 
         // 2. build JobExecuteAction (the old 0.X usage)
         JobExecuteAction executionAction = JobExecuteAction.builder()
-                .setCreator("IDE") // creator: the system name of the client requesting Linkis, used for system-level isolation
+                .setCreator("APPName") // creator: the system name of the client requesting Linkis, used for system-level isolation
                 .addExecuteCode(code) // execution code
                 .setEngineTypeStr("spark") // engineConn type
                 .setRunTypeStr("py") // code type
@@ -275,14 +275,14 @@ object LinkisClientTest {
     // set label map: EngineTypeLabel/UserCreatorLabel/EngineRunTypeLabel/Tenant
     val labels: util.Map[String, Any] = new util.HashMap[String, Any]
     labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, "spark-2.4.3"); // required engineType label
-    labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-IDE"); // required: execution user and creator
+    labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-APPName"); // required: execution user and creator
     labels.put(LabelKeyConstant.CODE_TYPE_KEY, "py"); // required codeType
 
     val startupMap = new java.util.HashMap[String, Any]()
     // Supports setting engine native parameters, for example the parameters of engines such as Spark/Hive
     startupMap.put("spark.executor.instances", 2);
     // setting linkis params
-    startupMap.put("wds.linkis.rm.yarnqueue", "dws");
+    startupMap.put("wds.linkis.rm.yarnqueue", "default");
     // 2. build jobSubmitAction
     val jobSubmitAction = JobSubmitAction.builder
       .addExecuteCode(code)
@@ -309,10 +309,10 @@ object LinkisClientTest {
     // Supports setting engine native parameters, for example the parameters of engines such as Spark/Hive
     startupMap.put("spark.executor.instances", 2)
     // setting linkis params
-    startupMap.put("wds.linkis.rm.yarnqueue", "dws")
+    startupMap.put("wds.linkis.rm.yarnqueue", "default")
     // 2. build JobExecuteAction (the old 0.X usage)
     val executionAction = JobExecuteAction.builder()
-      .setCreator("IDE") // creator: the system name of the client requesting Linkis, used for system-level isolation
+      .setCreator("APPName") // creator: the system name of the client requesting Linkis, used for system-level isolation
       .addExecuteCode(code) // execution code
       .setEngineTypeStr("spark") // engineConn type
       .setRunTypeStr("py") // code type
diff --git a/i18n/zh-CN/docusaurus-plugin-content-docs/current/user_guide/sdk_manual.md b/i18n/zh-CN/docusaurus-plugin-content-docs/current/user_guide/sdk_manual.md
index 599de273270..3a7db6550d9 100644
--- a/i18n/zh-CN/docusaurus-plugin-content-docs/current/user_guide/sdk_manual.md
+++ b/i18n/zh-CN/docusaurus-plugin-content-docs/current/user_guide/sdk_manual.md
@@ -57,7 +57,7 @@ public class LinkisClientTest {
             .readTimeout(30000) // set read timeout
             .setAuthenticationStrategy(new StaticAuthenticationStrategy()) // AuthenticationStrategy: Linkis authentication supports static and token modes
             .setAuthTokenKey("hadoop") // set submit user
-            .setAuthTokenValue("hadoop"))) // set password or token (setAuthTokenValue("BML-AUTH"))
+            .setAuthTokenValue("hadoop"))) // set password or token (setAuthTokenValue("test"))
             .setDWSVersion("v1") // linkis rest version v1
             .build();
@@ -72,9 +72,9 @@ public class LinkisClientTest {
 
         try {
             System.out.println("user : " + user + ", code : [" + executeCode + "]");
-            // 3. build job and execute
+            // 3. submit is the recommended way: task-related labels can be specified to enable more features
            JobExecuteResult jobExecuteResult = toSubmit(user, executeCode);
-            // 0.x: JobExecuteResult jobExecuteResult = toExecute(user, executeCode);
+            // 0.x-compatible way, not recommended: JobExecuteResult jobExecuteResult = toExecute(user, executeCode);
             System.out.println("execId: " + jobExecuteResult.getExecID() + ", taskId: " + jobExecuteResult.taskID());
             // 4. get job info
             JobInfoResult jobInfoResult = client.getJobInfo(jobExecuteResult);
@@ -115,8 +115,8 @@ public class LinkisClientTest {
         // set label map: EngineTypeLabel/UserCreatorLabel/EngineRunTypeLabel/Tenant
         Map labels = new HashMap();
         labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, "spark-2.4.3"); // required engineType label
-        labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-IDE"); // required: execution user and creator
-        labels.put(LabelKeyConstant.CODE_TYPE_KEY, "py"); // required codeType
+        labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-APPName"); // the requesting user and application name; neither parameter may be omitted, and APPName must not contain "-", which should be replaced with "_"
+        labels.put(LabelKeyConstant.CODE_TYPE_KEY, "py"); // specify the script type
         // set startup map: engineConn start params
         Map startupMap = new HashMap(16);
         // Supports setting engine native parameters, for example the parameters of engines such as Spark/Hive
@@ -153,7 +153,7 @@ public class LinkisClientTest {
 
         // 2. build JobExecuteAction (the old 0.X usage)
         JobExecuteAction executionAction = JobExecuteAction.builder()
-                .setCreator("IDE") // creator: the system name of the client requesting Linkis, used for system-level isolation
+                .setCreator("APPName") // creator: the system name of the client requesting Linkis, used for system-level isolation
                 .addExecuteCode(code) // execution code
                 .setEngineTypeStr("spark") // engineConn type
                 .setRunTypeStr("py") // code type
@@ -217,6 +217,7 @@ object LinkisClientTest {
     try {
       // 3. build job and execute
       println("user : " + user + ", code : [" + executeCode + "]")
+      // submit is recommended: it supports passing task labels
       val jobExecuteResult = toSubmit(user, executeCode)
       // 0.X: val jobExecuteResult = toExecute(user, executeCode)
       println("execId: " + jobExecuteResult.getExecID + ", taskId: " + jobExecuteResult.taskID)
@@ -271,14 +272,14 @@ object LinkisClientTest {
     // set label map: EngineTypeLabel/UserCreatorLabel/EngineRunTypeLabel/Tenant
     val labels: util.Map[String, Any] = new util.HashMap[String, Any]
     labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, "spark-2.4.3"); // required engineType label
-    labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-IDE"); // required: execution user and creator
-    labels.put(LabelKeyConstant.CODE_TYPE_KEY, "py"); // required codeType
+    labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-APPName"); // the requesting user and application name; neither parameter may be omitted, and APPName must not contain "-", which should be replaced with "_"
+    labels.put(LabelKeyConstant.CODE_TYPE_KEY, "py"); // specify the script type
 
     val startupMap = new java.util.HashMap[String, Any]()
     // Supports setting engine native parameters, for example the parameters of engines such as Spark/Hive
     startupMap.put("spark.executor.instances", 2);
     // setting linkis params
-    startupMap.put("wds.linkis.rm.yarnqueue", "dws");
+    startupMap.put("wds.linkis.rm.yarnqueue", "default");
     // 2. build jobSubmitAction
     val jobSubmitAction = JobSubmitAction.builder
       .addExecuteCode(code)
@@ -305,10 +306,10 @@ object LinkisClientTest {
     // Supports setting engine native parameters, for example the parameters of engines such as Spark/Hive
     startupMap.put("spark.executor.instances", 2)
     // setting linkis params
-    startupMap.put("wds.linkis.rm.yarnqueue", "dws")
+    startupMap.put("wds.linkis.rm.yarnqueue", "default")
     // 2. build JobExecuteAction (the old 0.X usage)
     val executionAction = JobExecuteAction.builder()
-      .setCreator("IDE") // creator: the system name of the client requesting Linkis, used for system-level isolation
+      .setCreator("APPName") // creator: the system name of the client requesting Linkis, used for system-level isolation
       .addExecuteCode(code) // execution code
       .setEngineTypeStr("spark") // engineConn type
       .setRunTypeStr("py") // code type
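[Editor's note] PATCH 2/3 repeatedly stresses that the UserCreatorLabel value is built as `user + "-" + creator`, so the application name itself must not contain "-" (the docs suggest replacing it with "_"). Below is a minimal sketch of that convention, using only the label keys that appear in the patch; the `sanitizeAppName` helper and the import path are assumptions added for illustration, not part of the Linkis SDK.

```java
import java.util.HashMap;
import java.util.Map;
// Import path assumed from the Linkis 1.x SDK; verify against your linkis-computation-client version.
import org.apache.linkis.manager.label.constant.LabelKeyConstant;

public class LabelExample {
    // Hypothetical helper (not part of the patch): "-" separates user from creator,
    // so any "-" inside the application name is replaced with "_" as the docs suggest.
    static String sanitizeAppName(String appName) {
        return appName.replace("-", "_");
    }

    public static void main(String[] args) {
        String user = "hadoop";
        String appName = sanitizeAppName("my-app"); // becomes "my_app"

        Map<String, Object> labels = new HashMap<>();
        labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, "spark-2.4.3");              // engine type and version
        labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-" + appName); // execution user + creator
        labels.put(LabelKeyConstant.CODE_TYPE_KEY, "py");                         // script type
        System.out.println(labels);
    }
}
```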
From 708a2aaa24b5b5b2a0989e55c11ab1d4b8159df4 Mon Sep 17 00:00:00 2001
From: peacewong
Date: Wed, 25 May 2022 12:06:58 +0800
Subject: [PATCH 3/3] Optimize the configuration doc

---
 docs/development/linkis_config.md                  | 4 ++--
 .../current/development/linkis_config.md           | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/docs/development/linkis_config.md b/docs/development/linkis_config.md
index ec801a506d6..b2b9d8148c3 100644
--- a/docs/development/linkis_config.md
+++ b/docs/development/linkis_config.md
@@ -106,8 +106,8 @@ Note: When submitting client parameters, only engine-related parameters, tag par
         labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, "spark-2.4.3"); // Specify the engine type and version
         labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-IDE"); // Specify the running user and your APPName
         labels.put(LabelKeyConstant.CODE_TYPE_KEY, "sql"); // Specify the type of script to run. Spark supports: sql, scala, py; Hive: hql; shell: sh; python: python; presto: psql
-        labels.put(LabelKeyConstant.JOB_RUNNING_TIMEOUT_KEY, "10000"); // Kill the job automatically once it has been running longer than this timeout; the unit is ms
-        labels.put(LabelKeyConstant.JOB_QUEUING_TIMEOUT_KEY, "10000"); // Kill the job automatically once it has been queued longer than this timeout; the unit is ms
+        labels.put(LabelKeyConstant.JOB_RUNNING_TIMEOUT_KEY, "10000"); // Kill the job automatically once it has been running longer than this timeout; the unit is s
+        labels.put(LabelKeyConstant.JOB_QUEUING_TIMEOUT_KEY, "10000"); // Kill the job automatically once it has been queued longer than this timeout; the unit is s
         labels.put(LabelKeyConstant.RETRY_TIMEOUT_KEY, "10000"); // How long the job waits before retrying after failures caused by resources or similar reasons, the unit is ms. For failures due to insufficient queue resources, 10 retries are initiated at this interval by default
         labels.put(LabelKeyConstant.TENANT_KEY, "hduser02"); // Tenant label; if a tenant parameter is specified for the task, the task is routed to a separate ECM machine
         labels.put(LabelKeyConstant.EXECUTE_ONCE_KEY, ""); // Execute-once label; setting this parameter is not recommended. Once set, the engine will not be reused and will shut down after the task finishes. Set it only when a task needs specialized parameters
diff --git a/i18n/zh-CN/docusaurus-plugin-content-docs/current/development/linkis_config.md b/i18n/zh-CN/docusaurus-plugin-content-docs/current/development/linkis_config.md
index f77846e2565..73c768ed395 100644
--- a/i18n/zh-CN/docusaurus-plugin-content-docs/current/development/linkis_config.md
+++ b/i18n/zh-CN/docusaurus-plugin-content-docs/current/development/linkis_config.md
@@ -106,8 +106,8 @@ linkis-cli -runtimeMap key1=value -runtimeMap key2=value
         labels.put(LabelKeyConstant.ENGINE_TYPE_KEY, "spark-2.4.3"); // Specify the engine type and version
         labels.put(LabelKeyConstant.USER_CREATOR_TYPE_KEY, user + "-IDE"); // Specify the running user and your APPName
         labels.put(LabelKeyConstant.CODE_TYPE_KEY, "sql"); // Specify the type of script to run. Spark supports: sql, scala, py; Hive: hql; shell: sh; python: python; presto: psql
-        labels.put(LabelKeyConstant.JOB_RUNNING_TIMEOUT_KEY, "10000"); // Kill the job automatically once it has been running longer than this timeout; the unit is ms
-        labels.put(LabelKeyConstant.JOB_QUEUING_TIMEOUT_KEY, "10000"); // Kill the job automatically once it has been queued longer than this timeout; the unit is ms
+        labels.put(LabelKeyConstant.JOB_RUNNING_TIMEOUT_KEY, "10000"); // Kill the job automatically once it has been running longer than this timeout; the unit is s
+        labels.put(LabelKeyConstant.JOB_QUEUING_TIMEOUT_KEY, "10000"); // Kill the job automatically once it has been queued longer than this timeout; the unit is s
         labels.put(LabelKeyConstant.RETRY_TIMEOUT_KEY, "10000"); // How long the job waits before retrying after failures caused by resources or similar reasons, the unit is ms. For failures due to insufficient queue resources, 10 retries are initiated at this interval by default
         labels.put(LabelKeyConstant.TENANT_KEY, "hduser02"); // Tenant label; if a tenant parameter is specified for the task, the task is routed to a separate ECM machine
         labels.put(LabelKeyConstant.EXECUTE_ONCE_KEY, ""); // Execute-once label; setting this parameter is not recommended. Once set, the engine will not be reused and will shut down after the task finishes. Set it only when a task needs specialized parameters
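[Editor's note] After PATCH 3/3, JOB_RUNNING_TIMEOUT_KEY and JOB_QUEUING_TIMEOUT_KEY are documented in seconds, so the example value "10000" now means roughly 2.8 hours rather than 10 seconds. Below is a short sketch of choosing values with the corrected units in mind; the class name and the specific numbers are illustrative assumptions, not taken from the patch, and the import path should be verified against your Linkis client version.

```java
import java.util.HashMap;
import java.util.Map;
// Import path assumed from the Linkis 1.x SDK; verify against your client version.
import org.apache.linkis.manager.label.constant.LabelKeyConstant;

public class TimeoutLabelExample {
    public static void main(String[] args) {
        Map<String, Object> labels = new HashMap<>();
        // Unit is seconds after the doc fix: kill the job if it runs longer than 10 minutes.
        labels.put(LabelKeyConstant.JOB_RUNNING_TIMEOUT_KEY, "600");
        // Unit is seconds: kill the job if it stays queued longer than 5 minutes.
        labels.put(LabelKeyConstant.JOB_QUEUING_TIMEOUT_KEY, "300");
        // RETRY_TIMEOUT_KEY remains in milliseconds per the doc: wait 10s between resource-related retries.
        labels.put(LabelKeyConstant.RETRY_TIMEOUT_KEY, "10000");
        System.out.println(labels);
    }
}
```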