diff --git a/linkis-dist/bin/install.sh b/linkis-dist/bin/install.sh index d7e52128ba..d6e50221d5 100644 --- a/linkis-dist/bin/install.sh +++ b/linkis-dist/bin/install.sh @@ -127,6 +127,7 @@ RANDOM_LINKISCLI_TOKEN="LINKISCLI-`cat /proc/sys/kernel/random/uuid | awk -F- '{ if [ $DEBUG_MODE != "true" ];then sed -i ${txt} "s#BML-AUTH#$RANDOM_BML_TOKEN#g" $LINKIS_HOME/conf/linkis-cli/linkis-cli.properties sed -i ${txt} "s#BML-AUTH#$RANDOM_BML_TOKEN#g" $common_conf + sed -i ${txt} "s#BML-AUTH#$RANDOM_BML_TOKEN#g" $LINKIS_HOME/admin/configuration_helper.sh sed -i ${txt} "s#LINKIS_CLI_TEST#$RANDOM_LINKIS_CLI_TEST_TOKEN#g" $common_conf sed -i ${txt} "s#WS-AUTH#$RANDOM_WS_TOKEN#g" $common_conf sed -i ${txt} "s#DSM-AUTH#$RANDOM_DSM_TOKEN#g" $common_conf @@ -532,6 +533,7 @@ sed -i ${txt} "s#spring.eureka.instance.metadata-map.linkis.conf.version.*#spri if [ "$RESULT_SET_ROOT_PATH" != "" ] then sed -i ${txt} "s#wds.linkis.resultSet.store.path.*#wds.linkis.resultSet.store.path=$RESULT_SET_ROOT_PATH#g" $entrance_conf + sed -i ${txt} "s#resultSetRootDir=.*#resultSetRootDir=$RESULT_SET_ROOT_PATH#g" $LINKIS_HOME/admin/linkis_task_res_log_clear.sh fi publicservice_conf=$LINKIS_HOME/conf/linkis-ps-publicservice.properties diff --git a/linkis-dist/package/admin/clear_ec_record.sh b/linkis-dist/package/admin/clear_ec_record.sh new file mode 100644 index 0000000000..5cd1525263 --- /dev/null +++ b/linkis-dist/package/admin/clear_ec_record.sh @@ -0,0 +1,51 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
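+#
+# Example scheduling (illustrative only; the schedule and log path are assumptions, adjust for your deployment):
+# run this cleanup once a day via crontab, e.g.
+#   0 2 * * * sh $LINKIS_HOME/admin/clear_ec_record.sh >> $LINKIS_HOME/logs/clear_ec_record.log 2>&1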
+#
+
+#
+# description: clear linkis_cg_ec_resource_info_record records older than 10 days
+#
+if [ -f ${LINKIS_CONF_DIR}/db.sh ]
+then
+   export LINKIS_DB_CONFIG_PATH=${LINKIS_CONF_DIR}/db.sh
+else
+   if [ -f ${LINKIS_HOME}/conf/db.sh ]
+   then
+      export LINKIS_DB_CONFIG_PATH=${LINKIS_HOME}/conf/db.sh
+   else
+      echo "cannot find db.sh"
+      exit
+   fi
+fi
+source ${LINKIS_DB_CONFIG_PATH}
+
+delete_day=`date -d "-10 days" "+%Y-%m-%d"`
+delete_time="$delete_day 00:00:00"
+echo "start to delete linkis_cg_ec_resource_info_record before $delete_time"
+parm="release_time <=\"$delete_time\" "
+
+count=`mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD $MYSQL_DB -ss -e "SELECT count(1) FROM linkis_cg_ec_resource_info_record where $parm limit 1"`
+maxid=`mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD $MYSQL_DB -ss -e "SELECT MAX(id) FROM linkis_cg_ec_resource_info_record where $parm limit 1"`
+echo "will delete count:$count"
+echo "maxid:$maxid"
+
+while [ $count -gt 1 ];do
+  mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD $MYSQL_DB -ss -e "DELETE FROM linkis_cg_ec_resource_info_record where id <= $maxid limit 5000;"
+  count=`mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD $MYSQL_DB -ss -e "SELECT count(1) FROM linkis_cg_ec_resource_info_record where $parm limit 1"`
+  echo "count change : $count"
+  sleep 1s
+done
+
+echo "clear_ec_record.sh over"
\ No newline at end of file
diff --git a/linkis-dist/package/admin/clear_history_task.sh b/linkis-dist/package/admin/clear_history_task.sh
new file mode 100644
index 0000000000..75c49cb715
--- /dev/null
+++ b/linkis-dist/package/admin/clear_history_task.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
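+#
+# NOTE: db.sh is expected to export the MySQL connection variables used below,
+# i.e. MYSQL_HOST, MYSQL_PORT, MYSQL_USER, MYSQL_PASSWORD and MYSQL_DB.
+# Illustrative example of a db.sh (placeholder values only, not defaults):
+#   MYSQL_HOST=127.0.0.1; MYSQL_PORT=3306; MYSQL_USER=linkis; MYSQL_PASSWORD=changeme; MYSQL_DB=linkis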
+#
+
+#
+# description: clear linkis_ps_job_history_group_history records older than 3 months (90 days)
+#
+if [ -f ${LINKIS_CONF_DIR}/db.sh ]
+then
+   export LINKIS_DB_CONFIG_PATH=${LINKIS_CONF_DIR}/db.sh
+else
+   if [ -f ${LINKIS_HOME}/conf/db.sh ]
+   then
+      export LINKIS_DB_CONFIG_PATH=${LINKIS_HOME}/conf/db.sh
+   else
+      echo "cannot find db.sh"
+      exit
+   fi
+fi
+source ${LINKIS_DB_CONFIG_PATH}
+
+delete_day=`date -d "-90 days" "+%Y-%m-%d"`
+delete_time="$delete_day 00:00:00"
+echo "start to delete linkis_ps_job_history_group_history before $delete_time"
+parm="created_time <=\"$delete_time\" "
+
+count=`mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD $MYSQL_DB -ss -e "SELECT count(1) FROM linkis_ps_job_history_group_history where $parm limit 1 "`
+maxid=`mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD $MYSQL_DB -ss -e "SELECT MAX(id) FROM linkis_ps_job_history_group_history where $parm limit 1 "`
+echo "will delete count:$count"
+echo "maxid:$maxid"
+
+while [ $count -gt 1 ];do
+  mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD $MYSQL_DB -ss -e "DELETE FROM linkis_ps_job_history_group_history where id <= $maxid limit 5000;"
+  count=`mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD $MYSQL_DB -ss -e "SELECT count(1) FROM linkis_ps_job_history_group_history where $parm limit 1 "`
+  echo "count change : $count"
+  sleep 1s
+done
\ No newline at end of file
diff --git a/linkis-dist/package/admin/configuration_helper.sh b/linkis-dist/package/admin/configuration_helper.sh
new file mode 100644
index 0000000000..8c918dfe61
--- /dev/null
+++ b/linkis-dist/package/admin/configuration_helper.sh
@@ -0,0 +1,89 @@
+#!/bin/bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+
+cd `dirname $0`
+cd ..
+INSTALL_HOME=`pwd`
+
+
+function print_usage(){
+  echo "Usage: configuration_helper.sh [add | get | delete] [engineType] [version] [creator] [configKey] [user] [configValue optional] [force optional]"
+  echo "get e.g.: sh configuration_helper.sh get spark 2.4.3 test wds.linkis.rm.instance hadoop"
+  echo "delete e.g.: sh configuration_helper.sh delete spark 2.4.3 test wds.linkis.rm.instance hadoop"
+  echo "add e.g.: sh configuration_helper.sh add spark 2.4.3 test wds.linkis.rm.instance hadoop 6"
+  echo "add e.g.: sh configuration_helper.sh add spark 2.4.3 test wds.linkis.rm.instance hadoop 6 force"
+  echo "add tips: adding with force will ignore the value check error"
+  echo "Most commands print help when invoked w/o parameters."
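+  echo "tips: the gateway address is read from \$LINKIS_CONF_DIR/linkis.properties (wds.linkis.gateway.url)"
+  echo "tips: requests are authenticated with the BML-AUTH token, which install.sh replaces with the real token"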
+} + +if [ $# -lt 6 ]; then + print_usage + exit 2 +fi + +# set LINKIS_HOME +if [ "$LINKIS_HOME" = "" ]; then + export LINKIS_HOME=$INSTALL_HOME +fi + +# set LINKIS_CONF_DIR +if [ "$LINKIS_CONF_DIR" = "" ]; then + export LINKIS_CONF_DIR=$LINKIS_HOME/conf +fi +linkisMainConf=$LINKIS_CONF_DIR/linkis.properties +gatewayUrl=$(grep wds.linkis.gateway.url $linkisMainConf | cut -d"=" -f2) +echo "gatewayUrl: $gatewayUrl" +engineType=$2 +version=$3 +creator=$4 +configKey=$5 +user=$6 +configValue=$7 +COMMAND=$1 +if [ "$8" = "force" ]; then + force=true +fi + +get() +{ + requestUrl="$gatewayUrl/api/rest_j/v1/configuration/keyvalue?creator=$creator&engineType=$engineType&version=$version&configKey=$configKey" + curl --location --request GET $requestUrl -H "Token-Code:BML-AUTH" -H "Token-User:$user" +} + +delete() +{ + requestUrl="$gatewayUrl/api/rest_j/v1/configuration/keyvalue" + requestBody="{\"engineType\":\"$engineType\",\"version\":\"$version\",\"creator\":\"$creator\",\"configKey\":\"$configKey\"}" + curl -i -X DELETE $requestUrl -H "Accept: application/json" -H "Content-Type: application/json" -H "Token-Code:BML-AUTH" -H "Token-User:$user" -d "$requestBody" +} + +add() +{ + requestUrl="$gatewayUrl/api/rest_j/v1/configuration/keyvalue" + requestBody="{\"engineType\":\"$engineType\",\"version\":\"$version\",\"creator\":\"$creator\",\"configKey\":\"$configKey\",\"configValue\":\"$configValue\",\"force\":\"$force\",\"user\":\"$user\"}" + curl -i -X POST $requestUrl -H "Accept: application/json" -H "Content-Type: application/json" -H "Token-Code:BML-AUTH" -H "Token-User:hadoop" -d "$requestBody" +} + +case $COMMAND in + add|get|delete) + $COMMAND + ;; + *) + print_usage + exit 2 + ;; +esac diff --git a/linkis-dist/package/admin/linkis_task_res_log_clear.sh b/linkis-dist/package/admin/linkis_task_res_log_clear.sh new file mode 100644 index 0000000000..4272633e23 --- /dev/null +++ b/linkis-dist/package/admin/linkis_task_res_log_clear.sh @@ -0,0 +1,54 @@ +#!/bin/bash +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +#!/bin/bash +expiredDays=365 +resultSetRootDir=/tmp/linkis +logRootDir=/tmp/linkis +userResultSetDir=$(hdfs dfs -ls $resultSetRootDir | awk '{print $8}') +realLogRootDir=$logRootDir/log +echo userResultSetDirs: $userResultSetDir +echo realLogRootDir: $realLogRootDir + +if [ -z $LINKIS_LOG_DIR ];then + expiredFileRecordDir=${LINKIS_HOME}/expiredFileRecord +else + expiredFileRecordDir=$LINKIS_LOG_DIR/expiredFileRecord +fi + +function createExpiredFileRecoredDir(){ + if [ ! 
-d $expiredFileRecordDir ];then + mkdir -p $expiredFileRecordDir + fi +} + +createExpiredFileRecoredDir +expireDate=$(date -d -${expiredDays}day +%Y-%m-%d) +expireResultSetFile=$expiredFileRecordDir/linkis_expire_resultset_dir_${expireDate}.txt +expireLogFile=$expiredFileRecordDir/linkis_expire_log_dir_${expireDate}.txt + +hdfs dfs -ls $realLogRootDir | awk '$8 ~ /.*linkis\/log\/[0-9|\-|\_]*/ {cmd = "date -d -12month +%Y-%m-%d";cmd | getline oneMonthAgo;if($6 < oneMonthAgo) print $8}' >> $expireLogFile + +for i in $userResultSetDir +do + hdfs dfs -ls $i/linkis | awk '$8 ~ /.*linkis\/[0-9\-]{10}/ {cmd = "date -d -12month +%Y-%m-%d";cmd | getline oneMonthAgo;if($6 < oneMonthAgo) print $8}' >> $expireResultSetFile +done + +cat $expireLogFile | xargs -n 1000 hdfs dfs -rm -r -f + +cat $expireResultSetFile | xargs -n 1000 hdfs dfs -rm -r -f + + diff --git a/linkis-dist/package/conf/linkis-et-monitor-file.properties b/linkis-dist/package/conf/linkis-et-monitor-file.properties new file mode 100644 index 0000000000..22a45841d8 --- /dev/null +++ b/linkis-dist/package/conf/linkis-et-monitor-file.properties @@ -0,0 +1,143 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
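+#
+# Each value below is a JSON alert template used by the monitor module.
+# Rough field meanings, summarized from the entries in this file (not an exhaustive spec):
+#   alert_title / alert_info : alarm title and detail text, may contain $placeholders filled in at send time
+#   alert_way                : notification channels, comma separated
+#   alert_reciver            : alarm receivers
+#   alert_level              : alarm severity level
+#   alert_obj / can_recover  : alarm object tag and whether the alarm can auto-recover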
+# + +jobhistory.errorcode.01002={"alert_title":"Linkis Service load is too high, please contact Linkis owner","alert_info":"Linkis Linkis Service load is too high, please contact Linkis owner","alert_way":"1,2,3","alert_reciver":"hadoop","alert_level":"2","alert_obj":"linkis-alert-obj","can_recover":"0"} +jobhistory.errorcode.01003={"alert_title":"Linkis Service load is too high, please contact Linkis owner","alert_info":"Linkis Service load is too high, please contact Linkis owner","alert_way":"1,2,3","alert_reciver":"hadoop","alert_level":"2","alert_obj":"linkis-alert-obj","can_recover":"0"} +jobhistory.errorcode.01101={"alert_title":"Linkis Service load is too high, please contact Linkis owner","alert_info":" Service load is too high, please contact Linkis owner","alert_way":"1,2,3","alert_reciver":"hadoop","alert_level":"2","alert_obj":"linkis-alert-obj","can_recover":"0"} +jobhistory.errorcode.01102={"alert_title":"Linkis ECM memory Service load is too high, please contact Linkis owner","alert_info":" Service load is too high, please contact Linkis owner","alert_way":"1,2,3","alert_reciver":"hadoop","alert_level":"2","alert_obj":"linkis-alert-obj","can_recover":"0"} +jobhistory.errorcode.01103={"alert_title":"Linkis ECM CPU Service load is too high, please contact Linkis owner","alert_info":" Service load is too high, please contact Linkis owner","alert_way":"1,2,3","alert_reciver":"hadoop","alert_level":"2","alert_obj":"linkis-alert-obj","can_recover":"0"} +jobhistory.errorcode.01104={"alert_title":"Linkis ECM instances Service load is too high, please contact Linkis owner","alert_info":" Service load is too high, please contact Linkis owner","alert_way":"1,2,3","alert_reciver":"hadoop","alert_level":"2","alert_obj":"linkis-alert-obj","can_recover":"0"} +jobhistory.errorcode.01105={"alert_title":"Linkis Memory Service load is too high, please contact Linkis owner","alert_info":"The machine has insufficient memory. 
Please contact the administrator to expand the memory.","alert_way":"1,2,3","alert_reciver":"hadoop","alert_level":"2","alert_obj":"linkis-alert-obj","can_recover":"0"}
+jobhistory.unfinished.time.exceed.sec.43200={"alert_title":"Linkis task execute timeout","alert_info":"Linkis task execute timeout 12h","alert_way":"1,2,3","alert_reciver":"","alert_level":"4","alert_obj":"Linkis","can_recover":"0"}
+
+ecm.resource.monitor.im.12003=\
+  {"alert_title":"Linkis Monitor Resource Alert",\
+  "alert_info":" $instance ECM Memory/cpu insufficient resource < 0.2 please contact Linkis owner: $name",\
+  "alert_way":"1,2,3",\
+  "alert_reciver":"hadoop",\
+  "alert_level":"3",\
+  "alert_obj":"linkis_alert",\
+  "can_recover":"0"}
+ecm.resource.monitor.im.12004=\
+  {"alert_title":"Linkis Monitor Resource Alert",\
+  "alert_info":" $instance ECM Memory/cpu insufficient resource < 0.2 please contact Linkis owner: $name",\
+  "alert_way":"1,2,3",\
+  "alert_reciver":"hadoop",\
+  "alert_level":"2",\
+  "alert_obj":"linkis_alert",\
+  "can_recover":"0"}
+ecm.resource.monitor.im.12005=\
+  {"alert_title":"Linkis entrance user running task monitor",\
+  "alert_info":"User $username running task at linkis ( $url ) > $runningtask ,please attention",\
+  "alert_way":"1,2,3",\
+  "alert_reciver":"hadoop",\
+  "alert_level":"3",\
+  "alert_obj":"linkis_alert",\
+  "can_recover":"0"}
+ecm.resource.monitor.im.12006=\
+  {"alert_title":"Linkis entrance user queued task monitor",\
+  "alert_info":"User $username queued task at linkis ( $url ) > $queuedtask ,please attention",\
+  "alert_way":"1,2,3",\
+  "alert_reciver":"hadoop",\
+  "alert_level":"3",\
+  "alert_obj":"linkis_alert",\
+  "can_recover":"0"}
+ecm.resource.monitor.im.12007=\
+  {"alert_title":"Linkis entrance user total task monitor",\
+  "alert_info":"User $username total task at linkis ( $url ) > $tasktotal ,please attention",\
+  "alert_way":"1,2,3",\
+  "alert_reciver":"hadoop",\
+  "alert_level":"3",\
+  "alert_obj":"linkis_alert",\
+  "can_recover":"0"}
+ecm.resource.monitor.im.12008=\
+  {"alert_title":"Linkis entrance all running task monitor",\
+  "alert_info":"linkis ( $url ) running task > $taskminor,please attention",\
+  "alert_way":"1,2,3",\
+  "alert_reciver":"hadoop",\
+  "alert_level":"3",\
+  "alert_obj":"linkis_alert",\
+  "can_recover":"0"}
+ecm.resource.monitor.im.12009=\
+  {"alert_title":"Linkis entrance all task monitor",\
+  "alert_info":"linkis ( $url ) all task > $taskmajor,please attention",\
+  "alert_way":"1,2,3",\
+  "alert_reciver":"hadoop",\
+  "alert_level":"2",\
+  "alert_obj":"linkis_alert",\
+  "can_recover":"0"}
+
+
+
+user.mode.monitor.im.12011=\
+  {"alert_title":"User mode execution timeout alarm",\
+  "alert_info":"User mode execution timeout alarm Linkis url: $url engineType:$engineType Task ID: $jobId please attention $name",\
+  "alert_way":"1,2,3",\
+  "alert_reciver":"alexyang,hadoop",\
+  "alert_level":"2",\
+  "alert_obj":"linkis_alert",\
+  "can_recover":"0"}
+user.mode.monitor.im.12012=\
+  {"alert_title":"User mode execution failure alarm",\
+  "alert_info":"User mode execution failure alarm Linkis url: $url Engine: $engineType TaskID: $jobId errorCode: $errorCode errorMsg: $errorMsg please attention $name",\
+  "alert_way":"1,2,3",\
+  "alert_reciver":"alexyang,hadoop",\
+  "alert_level":"2",\
+  "alert_obj":"linkis_alert",\
+  "can_recover":"0"}
+
+user.mode.monitor.im.12017=\
+  {"alert_title":"@alert_title",\
+  "alert_info":"task execute failed, reason $msg",\
+  "alert_way":"1,2,3",\
+  "alert_reciver":"@alert_reciver",\
+
"alert_level":"@alert_level",\ + "alert_obj":"linkis_alert",\ + "can_recover":"0"} +user.mode.monitor.im.12018=\ + {"alert_title":"@alert_title",\ + "alert_info":"task execute time out $timeout",\ + "alert_way":"1,2,3",\ + "alert_reciver":"@alert_reciver",\ + "alert_level":"@alert_level",\ + "alert_obj":"linkis_alert",\ + "can_recover":"0"} + +thread.monitor.timeout.im.12014=\ + {"alert_title":" Linkis Shell Timeout Alert ",\ + "alert_info":"Monitor Shell execute time out $shellName",\ + "alert_way":"1,2,3",\ + "alert_reciver":"hadoop",\ + "alert_level":"3",\ + "alert_obj":"linkis_alert",\ + "can_recover":"0"} +jobhistory.result.monitor.im.12015=\ + {"alert_title":"Linkis \u4EFB\u52A1\u72B6\u6001\u901A\u77E5",\ + "alert_info":"Task ID:$id final status: $status",\ + "alert_way":"1,2,3",\ + "alert_reciver":"",\ + "alert_level":"4",\ + "alert_obj":"linkis_alert",\ + "can_recover":"0"} +jobhistory.result.monitor.im.12016=\ + {"alert_title":"@alert_title",\ + "alert_info":"[Alarm time]$date\n[Subsystem]$sysid\n[Alarm IP]$ip\n[Alarm object]$object\n[Alarm information]$detail",\ + "alert_way":"1,2,3",\ + "alert_reciver":"@alert_reciver",\ + "alert_level":"@alert_level",\ + "alert_obj":"linkis_alert",\ + "can_recover":"0"} + + diff --git a/linkis-dist/package/conf/linkis-et-monitor.properties b/linkis-dist/package/conf/linkis-et-monitor.properties new file mode 100644 index 0000000000..6d802941ca --- /dev/null +++ b/linkis-dist/package/conf/linkis-et-monitor.properties @@ -0,0 +1,84 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +wds.linkis.server.mybatis.typeAliasesPackage=org.apache.linkis.monitor.scan.app.instance.entity,org.apache.linkis.monitor.scan.app.jobhistory.entity,org.apache.linkis.bml.cleaner.dao +wds.linkis.server.mybatis.BasePackage=org.apache.linkis.monitor.scan.app.instance.dao,org.apache.linkis.monitor.scan.app.jobhistory.dao,org.apache.linkis.bml.cleaner.dao + + +# alert server url +linkis.alert.url=http://127.0.0.1/ + +# alert receiver +linkis.alert.receiver.default=hadoop + +# monitor ecm resource +linkis.monitor.ecm.resource.cron=0 0/10 * * * ? + +# Resource remaining ratio +linkis.monitor.ecmResourceTask.major=0.005 +linkis.monitor.ecmResourceTask.minor=0.01 + +# entrance task metrics cron +linkis.monitor.entrance.task.cron=0 0/10 * * * ? + +# Timeout task cron +linkis.monitor.jobHistory.timeout.cron=0 0/20 * * * ? + +# time out interval 24h +linkis.monitor.scanner.timeout.interval.seconds=86400 + + +# Finished task cron +linkis.monitor.jobHistory.finished.cron=0 0/20 * * * ? + +# linkis user mode cron +linkis.monitor.user.mode.cron=0 0/20 * * * ? 
+
+# engine list used by the user-mode check
+linkis.monitor.user.enginelist=[{"engineType":"hive-2.3.3","code":"show tables","runType":"hql","executeUser":"hadoop"},\
+  {"engineType":"spark-2.4.3","code":"show tables","runType":"sql","executeUser":"hadoop"},\
+  {"engineType":"shell-1","code":"pwd","runType":"sh","executeUser":"hadoop"}]
+
+linkis.monitor.user.mode.timeOut=300
+
+
+# bml clear cron
+linkis.monitor.bml.clear.history.version.cron=0 0 12 * * ?
+# bml max version
+linkis.bml.cleaner.version.max.num=50
+# keep version
+linkis.bml.cleaner.version.keep.num=20
+# clear max interval
+linkis.bml.cleaner.previous.interval.days=30
+# once scan limit
+linkis.bml.cleaner.once.limit.num=100
+
+# clear db ec record cron
+linkis.monitor.clear.ecRecord.cron=0 10 12 * * ?
+
+# clear task log cron
+linkis.monitor.clear.taskLog.cron=0 10 12 * * ?
+
+# clear db task history cron
+linkis.monitor.clear.history.task.cron=0 0 13 * * ?
+
+# scheduled pool size
+linkis.monitor.scheduled.pool.cores.num=20
+
+# shell timeout, in minutes
+linkis.monitor.shell.time.out.minute=60
+
+##Spring
+spring.server.port=9119
diff --git a/linkis-dist/package/sbin/ext/linkis-et-monitor b/linkis-dist/package/sbin/ext/linkis-et-monitor
new file mode 100644
index 0000000000..e0c78c487d
--- /dev/null
+++ b/linkis-dist/package/sbin/ext/linkis-et-monitor
@@ -0,0 +1,43 @@
+#!/usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+# http://www.apache.org/licenses/LICENSE-2.0
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# description: Monitor service start command
+#
+
+
+source $LINKIS_CONF_DIR/linkis-env.sh
+export SERVER_SUFFIX="linkis-extensions/linkis-et-monitor"
+
+export SERVER_CLASS=org.apache.linkis.monitor.LinksMonitorApplication
+
+if test -z "$MONITOR_HEAP_SIZE"
+  then
+    if test -z "$SERVER_HEAP_SIZE"
+    then
+      export SERVER_HEAP_SIZE="512M"
+    fi
+else
+  export SERVER_HEAP_SIZE=$MONITOR_HEAP_SIZE
+fi
+
+#export DEBUG_PORT=
+
+export COMMON_START_BIN=$LINKIS_HOME/sbin/ext/linkis-common-start
+if [[ ! -f "${COMMON_START_BIN}" ]]; then
+    echo "The $COMMON_START_BIN does not exist!"
+ exit 1 +else + sh $COMMON_START_BIN +fi \ No newline at end of file diff --git a/linkis-dist/src/main/assembly/distribution.xml b/linkis-dist/src/main/assembly/distribution.xml index 57fd07d4d6..a506c99b18 100644 --- a/linkis-dist/src/main/assembly/distribution.xml +++ b/linkis-dist/src/main/assembly/distribution.xml @@ -261,6 +261,21 @@ + + + + + + ../linkis-extensions/linkis-et-monitor/target/out/lib + + + linkis-package/lib/linkis-extensions/linkis-et-monitor + + + **/* + + + diff --git a/linkis-extensions/linkis-et-monitor/pom.xml b/linkis-extensions/linkis-et-monitor/pom.xml new file mode 100644 index 0000000000..0e5f44c076 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/pom.xml @@ -0,0 +1,112 @@ + + + + 4.0.0 + + org.apache.linkis + linkis + ${revision} + ../../pom.xml + + + linkis-et-monitor + + + + org.apache.linkis + linkis-httpclient + ${project.version} + + + org.apache.linkis + linkis-mybatis + ${project.version} + + + + org.apache.linkis + linkis-storage + ${project.version} + provided + + + org.apache.linkis + linkis-rpc + ${project.version} + provided + + + + org.apache.linkis + linkis-gateway-httpclient-support + ${project.version} + + + + org.apache.linkis + linkis-common + ${project.version} + + + + org.springframework + spring-test + test + + + + junit + junit + test + + + org.apache.linkis + linkis-computation-client + ${project.version} + + + + + + + net.alchim31.maven + scala-maven-plugin + + + org.apache.maven.plugins + maven-assembly-plugin + false + + false + out + false + false + + src/main/assembly/distribution.xml + + + + + make-assembly + + single + + package + + + + + + + diff --git a/linkis-extensions/linkis-et-monitor/src/main/assembly/distribution.xml b/linkis-extensions/linkis-et-monitor/src/main/assembly/distribution.xml new file mode 100644 index 0000000000..e606ed79f2 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/assembly/distribution.xml @@ -0,0 +1,296 @@ + + + + + linkis-et-monitor + + dir + + false + linkis-et-monitor + + + + + + lib + true + true + false + false + true + + + antlr:antlr:jar + aopalliance:aopalliance:jar + asm:asm:jar + cglib:cglib:jar + com.amazonaws:aws-java-sdk-autoscaling:jar + com.amazonaws:aws-java-sdk-core:jar + com.amazonaws:aws-java-sdk-ec2:jar + com.amazonaws:aws-java-sdk-route53:jar + com.amazonaws:aws-java-sdk-sts:jar + com.amazonaws:jmespath-java:jar + com.fasterxml.jackson.core:jackson-annotations:jar + com.fasterxml.jackson.core:jackson-core:jar + com.fasterxml.jackson.core:jackson-databind:jar + com.fasterxml.jackson.dataformat:jackson-dataformat-cbor:jar + com.fasterxml.jackson.datatype:jackson-datatype-jdk8:jar + com.fasterxml.jackson.datatype:jackson-datatype-jsr310:jar + com.fasterxml.jackson.jaxrs:jackson-jaxrs-base:jar + com.fasterxml.jackson.jaxrs:jackson-jaxrs-json-provider:jar + com.fasterxml.jackson.module:jackson-module-jaxb-annotations:jar + com.fasterxml.jackson.module:jackson-module-parameter-names:jar + com.fasterxml.jackson.module:jackson-module-paranamer:jar + com.fasterxml.jackson.module:jackson-module-scala_2.11:jar + com.github.andrewoma.dexx:dexx-collections:jar + com.github.vlsi.compactmap:compactmap:jar + com.google.code.findbugs:annotations:jar + com.google.code.findbugs:jsr305:jar + com.google.code.gson:gson:jar + com.google.guava:guava:jar + com.google.inject:guice:jar + com.google.protobuf:protobuf-java:jar + com.netflix.archaius:archaius-core:jar + com.netflix.eureka:eureka-client:jar + com.netflix.eureka:eureka-core:jar + com.netflix.hystrix:hystrix-core:jar + 
com.netflix.netflix-commons:netflix-commons-util:jar + com.netflix.netflix-commons:netflix-eventbus:jar + com.netflix.netflix-commons:netflix-infix:jar + com.netflix.netflix-commons:netflix-statistics:jar + com.netflix.ribbon:ribbon:jar + com.netflix.ribbon:ribbon-core:jar + com.netflix.ribbon:ribbon-eureka:jar + com.netflix.ribbon:ribbon-httpclient:jar + com.netflix.ribbon:ribbon-loadbalancer:jar + com.netflix.ribbon:ribbon-transport:jar + com.netflix.servo:servo-core:jar + com.ning:async-http-client:jar + com.sun.jersey.contribs:jersey-apache-client4:jar + com.sun.jersey:jersey-client:jar + com.sun.jersey:jersey-core:jar + com.sun.jersey:jersey-json:jar + com.sun.jersey:jersey-server:jar + com.sun.jersey:jersey-servlet:jar + com.sun.xml.bind:jaxb-impl:jar + com.thoughtworks.paranamer:paranamer:jar + com.thoughtworks.xstream:xstream:jar + org.apache.linkis:linkis-common:jar + org.apache.linkis:linkis-module:jar + commons-beanutils:commons-beanutils:jar + commons-beanutils:commons-beanutils-core:jar + commons-cli:commons-cli:jar + commons-codec:commons-codec:jar + commons-collections:commons-collections:jar + commons-configuration:commons-configuration:jar + commons-daemon:commons-daemon:jar + commons-dbcp:commons-dbcp:jar + commons-digester:commons-digester:jar + commons-httpclient:commons-httpclient:jar + commons-io:commons-io:jar + commons-jxpath:commons-jxpath:jar + commons-lang:commons-lang:jar + commons-logging:commons-logging:jar + commons-net:commons-net:jar + commons-pool:commons-pool:jar + io.micrometer:micrometer-core:jar + io.netty:netty:jar + io.netty:netty-all:jar + io.netty:netty-buffer:jar + io.netty:netty-codec:jar + io.netty:netty-codec-http:jar + io.netty:netty-common:jar + io.netty:netty-handler:jar + io.netty:netty-transport:jar + io.netty:netty-transport-native-epoll:jar + io.reactivex:rxjava:jar + io.reactivex:rxnetty:jar + io.reactivex:rxnetty-contexts:jar + io.reactivex:rxnetty-servo:jar + javax.activation:activation:jar + javax.annotation:javax.annotation-api:jar + javax.inject:javax.inject:jar + javax.servlet:javax.servlet-api:jar + javax.servlet.jsp:jsp-api:jar + javax.validation:validation-api:jar + javax.websocket:javax.websocket-api:jar + javax.ws.rs:javax.ws.rs-api:jar + javax.xml.bind:jaxb-api:jar + javax.xml.stream:stax-api:jar + joda-time:joda-time:jar + log4j:log4j:jar + mysql:mysql-connector-java:jar + net.databinder.dispatch:dispatch-core_2.11:jar + net.databinder.dispatch:dispatch-json4s-jackson_2.11:jar + org.antlr:antlr-runtime:jar + org.antlr:stringtemplate:jar + org.apache.commons:commons-compress:jar + org.apache.commons:commons-math:jar + org.apache.commons:commons-math3:jar + org.apache.curator:curator-client:jar + org.apache.curator:curator-framework:jar + org.apache.curator:curator-recipes:jar + org.apache.directory.api:api-asn1-api:jar + org.apache.directory.api:api-util:jar + org.apache.directory.server:apacheds-i18n:jar + org.apache.directory.server:apacheds-kerberos-codec:jar + org.apache.hadoop:hadoop-annotations:jar + org.apache.hadoop:hadoop-auth:jar + org.apache.hadoop:hadoop-common:jar + org.apache.hadoop:hadoop-hdfs:jar + org.apache.htrace:htrace-core:jar + org.apache.httpcomponents:httpclient:jar + org.apache.httpcomponents:httpcore:jar + org.apache.logging.log4j:log4j-api:jar + org.apache.logging.log4j:log4j-core:jar + org.apache.logging.log4j:log4j-jul:jar + org.apache.logging.log4j:log4j-slf4j-impl:jar + org.apache.zookeeper:zookeeper:jar + org.aspectj:aspectjweaver:jar + org.bouncycastle:bcpkix-jdk15on:jar + 
org.bouncycastle:bcprov-jdk15on:jar + org.codehaus.jackson:jackson-jaxrs:jar + org.codehaus.jackson:jackson-xc:jar + org.codehaus.jettison:jettison:jar + org.codehaus.woodstox:stax2-api:jar + org.codehaus.woodstox:woodstox-core-asl:jar + org.eclipse.jetty:jetty-annotations:jar + org.eclipse.jetty:jetty-client:jar + org.eclipse.jetty:jetty-continuation:jar + org.eclipse.jetty:jetty-http:jar + org.eclipse.jetty:jetty-io:jar + org.eclipse.jetty:jetty-jndi:jar + org.eclipse.jetty:jetty-plus:jar + org.eclipse.jetty:jetty-security:jar + org.eclipse.jetty:jetty-server:jar + org.eclipse.jetty:jetty-servlet:jar + org.eclipse.jetty:jetty-servlets:jar + org.eclipse.jetty:jetty-util:jar + org.eclipse.jetty:jetty-webapp:jar + org.eclipse.jetty:jetty-xml:jar + org.eclipse.jetty.websocket:javax-websocket-client-impl:jar + org.eclipse.jetty.websocket:javax-websocket-server-impl:jar + org.eclipse.jetty.websocket:websocket-api:jar + org.eclipse.jetty.websocket:websocket-client:jar + org.eclipse.jetty.websocket:websocket-common:jar + org.eclipse.jetty.websocket:websocket-server:jar + org.eclipse.jetty.websocket:websocket-servlet:jar + org.fusesource.leveldbjni:leveldbjni-all:jar + org.glassfish.hk2:class-model:jar + org.glassfish.hk2:config-types:jar + org.glassfish.hk2.external:aopalliance-repackaged:jar + org.glassfish.hk2.external:asm-all-repackaged:jar + org.glassfish.hk2.external:bean-validator:jar + org.glassfish.hk2.external:javax.inject:jar + org.glassfish.hk2:hk2:jar + org.glassfish.hk2:hk2-api:jar + org.glassfish.hk2:hk2-config:jar + org.glassfish.hk2:hk2-core:jar + org.glassfish.hk2:hk2-locator:jar + org.glassfish.hk2:hk2-runlevel:jar + org.glassfish.hk2:hk2-utils:jar + org.glassfish.hk2:osgi-resource-locator:jar + org.glassfish.hk2:spring-bridge:jar + org.glassfish.jersey.bundles:jaxrs-ri:jar + org.glassfish.jersey.bundles.repackaged:jersey-guava:jar + org.glassfish.jersey.containers:jersey-container-servlet:jar + org.glassfish.jersey.containers:jersey-container-servlet-core:jar + org.glassfish.jersey.core:jersey-client:jar + org.glassfish.jersey.core:jersey-common:jar + org.glassfish.jersey.core:jersey-server:jar + org.glassfish.jersey.ext:jersey-entity-filtering:jar + org.glassfish.jersey.ext:jersey-spring3:jar + org.glassfish.jersey.media:jersey-media-jaxb:jar + org.glassfish.jersey.media:jersey-media-json-jackson:jar + org.glassfish.jersey.media:jersey-media-multipart:jar + org.hdrhistogram:HdrHistogram:jar + org.javassist:javassist:jar + org.json4s:json4s-ast_2.11:jar + org.json4s:json4s-core_2.11:jar + org.json4s:json4s-jackson_2.11:jar + org.jsoup:jsoup:jar + org.jvnet.mimepull:mimepull:jar + org.jvnet:tiger-types:jar + org.latencyutils:LatencyUtils:jar + org.mortbay.jasper:apache-el:jar + org.mortbay.jetty:jetty:jar + org.mortbay.jetty:jetty-util:jar + org.ow2.asm:asm-analysis:jar + org.ow2.asm:asm-commons:jar + org.ow2.asm:asm-tree:jar + org.reflections:reflections:jar + org.scala-lang.modules:scala-parser-combinators_2.11:jar + org.scala-lang.modules:scala-xml_2.11:jar + org.scala-lang:scala-compiler:jar + org.scala-lang:scala-library:jar + org.scala-lang:scala-reflect:jar + org.scala-lang:scalap:jar + org.slf4j:jul-to-slf4j:jar + org.slf4j:slf4j-api:jar + org.springframework.boot:spring-boot:jar + org.springframework.boot:spring-boot-actuator:jar + org.springframework.boot:spring-boot-actuator-autoconfigure:jar + org.springframework.boot:spring-boot-autoconfigure:jar + org.springframework.boot:spring-boot-starter:jar + org.springframework.boot:spring-boot-starter-actuator:jar + 
org.springframework.boot:spring-boot-starter-aop:jar + org.springframework.boot:spring-boot-starter-jetty:jar + org.springframework.boot:spring-boot-starter-json:jar + org.springframework.boot:spring-boot-starter-log4j2:jar + org.springframework.boot:spring-boot-starter-web:jar + org.springframework.cloud:spring-cloud-commons:jar + org.springframework.cloud:spring-cloud-config-client:jar + org.springframework.cloud:spring-cloud-context:jar + org.springframework.cloud:spring-cloud-netflix-archaius:jar + org.springframework.cloud:spring-cloud-netflix-core:jar + org.springframework.cloud:spring-cloud-netflix-eureka-client:jar + org.springframework.cloud:spring-cloud-netflix-ribbon:jar + org.springframework.cloud:spring-cloud-starter:jar + org.springframework.cloud:spring-cloud-starter-config:jar + org.springframework.cloud:spring-cloud-starter-eureka:jar + org.springframework.cloud:spring-cloud-starter-netflix-archaius:jar + org.springframework.cloud:spring-cloud-starter-netflix-eureka-client:jar + org.springframework.cloud:spring-cloud-starter-netflix-ribbon:jar + org.springframework.security:spring-security-crypto:jar + org.springframework.security:spring-security-rsa:jar + org.springframework:spring-aop:jar + org.springframework:spring-beans:jar + org.springframework:spring-context:jar + org.springframework:spring-core:jar + org.springframework:spring-expression:jar + org.springframework:spring-jcl:jar + org.springframework:spring-web:jar + org.springframework:spring-webmvc:jar + org.tukaani:xz:jar + org.yaml:snakeyaml:jar + software.amazon.ion:ion-java:jar + xerces:xercesImpl:jar + xmlenc:xmlenc:jar + xmlpull:xmlpull:jar + xpp3:xpp3_min:jar + + + + + + + + + diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/LinksMonitorApplication.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/LinksMonitorApplication.java new file mode 100644 index 0000000000..8f503dcf65 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/LinksMonitorApplication.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor; + +import org.apache.linkis.LinkisBaseServerApp; + +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.scheduling.annotation.EnableScheduling; + +@EnableScheduling +@SpringBootApplication +public class LinksMonitorApplication { + + public static void main(String[] args) throws ReflectiveOperationException { + LinkisBaseServerApp.main(args); + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/dao/VersionDao.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/dao/VersionDao.java new file mode 100644 index 0000000000..b4492c95ac --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/dao/VersionDao.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.bml.cleaner.dao; + +import org.apache.linkis.monitor.bml.cleaner.entity.CleanedResourceVersion; +import org.apache.linkis.monitor.bml.cleaner.entity.ResourceVersion; +import org.apache.linkis.monitor.bml.cleaner.vo.CleanResourceVo; + +import org.apache.ibatis.annotations.*; + +import java.util.Date; +import java.util.List; + +public interface VersionDao { + + @Select( + "select resource_id, count(resource_id) as version_count, max(version) as max_version from " + + "linkis_ps_bml_resources_version lpbrv where start_time < #{startTime} GROUP BY resource_id HAVING count(resource_id) > #{maxVersionNum} limit #{limitNum}") + List getAllNeedCleanResource( + @Param("maxVersionNum") Integer maxVersionNum, + @Param("startTime") Date startTime, + @Param("limitNum") int num); + + @Select( + "select * from linkis_ps_bml_resources_version where resource_id = #{resourceId} and version < #{minKeepVersion} and version <> 'v000001'") + List getCleanVersionsByResourceId( + @Param("resourceId") String resourceId, @Param("minKeepVersion") String minKeepVersion); + + @Insert({ + "insert into linkis_ps_bml_cleaned_resources_version(`resource_id`,`file_md5`,`version`,`size`,`start_byte`, `end_byte`,`resource`,`description`," + + "`start_time`,`end_time`,`client_ip`,`updator`,`enable_flag`,`old_resource`) values(#{resourceId},#{fileMd5},#{version},#{size},#{startByte},#{endByte}" + + ",#{resource},#{description},#{startTime},#{endTime},#{clientIp},#{updator},#{enableFlag},#{oldResource})" + }) + @Options(useGeneratedKeys = true, keyProperty = "id") + void insertCleanResourceVersion(CleanedResourceVersion cleanedResourceVersion); + + @Delete("delete from linkis_ps_bml_resources_version where id=#{id}") + void deleteResourceVersionById(@Param("id") long id); + + @Select( + "select version from linkis_ps_bml_resources_version where 
resource_id =#{resourceId} and version <= #{maxVersion} order by version desc limit #{keepNum},1") + String getMinKeepVersion( + @Param("resourceId") String resourceId, + @Param("maxVersion") String maxVersion, + @Param("keepNum") int keepNum); +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/entity/CleanedResourceVersion.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/entity/CleanedResourceVersion.java new file mode 100644 index 0000000000..5a0bfcc487 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/entity/CleanedResourceVersion.java @@ -0,0 +1,209 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.bml.cleaner.entity; + +import java.util.Date; + +public class CleanedResourceVersion { + + private long id; + + private String resourceId; + + private String fileMd5; + + private String version; + + private long size; + + private String resource; + + private String oldResource; + + private String description; + + private String clientIp; + + private boolean enableFlag; + + private String user; + + private String system; + + private Date startTime; + + private Date endTime; + + private long startByte; + + private long endByte; + + private String updator; + + public String getResourceId() { + return resourceId; + } + + public void setResourceId(String resourceId) { + this.resourceId = resourceId; + } + + public String getUser() { + return user; + } + + public void setUser(String user) { + this.user = user; + } + + public String getSystem() { + return system; + } + + public void setSystem(String system) { + this.system = system; + } + + public String getVersion() { + return version; + } + + public void setVersion(String version) { + this.version = version; + } + + public String getResource() { + return resource; + } + + public void setResource(String resource) { + this.resource = resource; + } + + public String getOldResource() { + return oldResource; + } + + public void setOldResource(String oldResource) { + this.oldResource = oldResource; + } + + public long getId() { + return id; + } + + public void setId(long id) { + this.id = id; + } + + public String getFileMd5() { + return fileMd5; + } + + public void setFileMd5(String fileMd5) { + this.fileMd5 = fileMd5; + } + + public long getSize() { + return size; + } + + public void setSize(long size) { + this.size = size; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getClientIp() { + return clientIp; + } + + public void setClientIp(String clientIp) { + this.clientIp = clientIp; + 
} + + public boolean isEnableFlag() { + return enableFlag; + } + + public void setEnableFlag(boolean enableFlag) { + this.enableFlag = enableFlag; + } + + public long getStartByte() { + return startByte; + } + + public void setStartByte(long startByte) { + this.startByte = startByte; + } + + public long getEndByte() { + return endByte; + } + + public void setEndByte(long endByte) { + this.endByte = endByte; + } + + public Date getStartTime() { + return startTime; + } + + public void setStartTime(Date startTime) { + this.startTime = startTime; + } + + public Date getEndTime() { + return endTime; + } + + public void setEndTime(Date endTime) { + this.endTime = endTime; + } + + public String getUpdator() { + return updator; + } + + public void setUpdator(String updator) { + this.updator = updator; + } + + public static CleanedResourceVersion copyFromResourceVersion(ResourceVersion resourceVersion) { + CleanedResourceVersion cleanedResourceVersion = new CleanedResourceVersion(); + cleanedResourceVersion.setResourceId(resourceVersion.getResourceId()); + cleanedResourceVersion.setOldResource(resourceVersion.getResource()); + cleanedResourceVersion.setFileMd5(resourceVersion.getFileMd5()); + cleanedResourceVersion.setClientIp(resourceVersion.getClientIp()); + cleanedResourceVersion.setSize(resourceVersion.getSize()); + cleanedResourceVersion.setEnableFlag(resourceVersion.getEnableFlag()); + cleanedResourceVersion.setVersion(resourceVersion.getVersion()); + cleanedResourceVersion.setStartByte(resourceVersion.getStartByte()); + cleanedResourceVersion.setEndByte(resourceVersion.getEndByte()); + cleanedResourceVersion.setStartTime(resourceVersion.getStartTime()); + cleanedResourceVersion.setEndTime(resourceVersion.getEndTime()); + return cleanedResourceVersion; + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/entity/ResourceVersion.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/entity/ResourceVersion.java new file mode 100644 index 0000000000..5d297cf180 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/entity/ResourceVersion.java @@ -0,0 +1,206 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.bml.cleaner.entity; + +import java.util.Date; + +public class ResourceVersion { + + private long id; + + private String resourceId; + + private String fileMd5; + + private String version; + + private long size; + + private String resource; + + private String description; + + private String clientIp; + + private boolean enableFlag; + + private String user; + + private String system; + + private Date startTime; + + private Date endTime; + + private long startByte; + + private long endByte; + + private String updator; + + public String getResourceId() { + return resourceId; + } + + public void setResourceId(String resourceId) { + this.resourceId = resourceId; + } + + public String getUser() { + return user; + } + + public void setUser(String user) { + this.user = user; + } + + public String getSystem() { + return system; + } + + public void setSystem(String system) { + this.system = system; + } + + public String getVersion() { + return version; + } + + public void setVersion(String version) { + this.version = version; + } + + public String getResource() { + return resource; + } + + public void setResource(String resource) { + this.resource = resource; + } + + public long getId() { + return id; + } + + public void setId(long id) { + this.id = id; + } + + public String getFileMd5() { + return fileMd5; + } + + public void setFileMd5(String fileMd5) { + this.fileMd5 = fileMd5; + } + + public long getSize() { + return size; + } + + public void setSize(long size) { + this.size = size; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public String getClientIp() { + return clientIp; + } + + public void setClientIp(String clientIp) { + this.clientIp = clientIp; + } + + public boolean getEnableFlag() { + return enableFlag; + } + + public void setEnableFlag(boolean enableFlag) { + this.enableFlag = enableFlag; + } + + public long getStartByte() { + return startByte; + } + + public void setStartByte(long startByte) { + this.startByte = startByte; + } + + public long getEndByte() { + return endByte; + } + + public void setEndByte(long endByte) { + this.endByte = endByte; + } + + public Date getStartTime() { + return startTime; + } + + public void setStartTime(Date startTime) { + this.startTime = startTime; + } + + public Date getEndTime() { + return endTime; + } + + public void setEndTime(Date endTime) { + this.endTime = endTime; + } + + public String getUpdator() { + return updator; + } + + public void setUpdator(String updator) { + this.updator = updator; + } + + public static ResourceVersion createNewResourceVersion( + String resourceId, + String resourcePath, + String fileMd5, + String clientIp, + long size, + String version, + long startByte) { + ResourceVersion resourceVersion = new ResourceVersion(); + resourceVersion.setResourceId(resourceId); + resourceVersion.setResource(resourcePath); + resourceVersion.setFileMd5(fileMd5); + resourceVersion.setClientIp(clientIp); + resourceVersion.setSize(size); + resourceVersion.setEnableFlag(true); + resourceVersion.setVersion(version); + resourceVersion.setStartByte(startByte); + resourceVersion.setEndByte(startByte + size - 1); + resourceVersion.setStartTime(new Date(System.currentTimeMillis())); + resourceVersion.setEndTime(new Date(System.currentTimeMillis())); + return resourceVersion; + } +} diff --git 
a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/CleanerService.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/CleanerService.java new file mode 100644 index 0000000000..0dfa15f396 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/CleanerService.java @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.bml.cleaner.service; + +public interface CleanerService { + + public void run(); +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/VersionService.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/VersionService.java new file mode 100644 index 0000000000..cc1a45c9e6 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/VersionService.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.bml.cleaner.service; + +import org.apache.linkis.common.io.FsPath; +import org.apache.linkis.monitor.bml.cleaner.entity.CleanedResourceVersion; +import org.apache.linkis.storage.fs.FileSystem; + +import java.io.IOException; + +public interface VersionService { + + void doMove( + FileSystem fs, + FsPath srcPath, + FsPath destPath, + CleanedResourceVersion insertVersion, + long delVersionId) + throws IOException; + + void moveOnDb(CleanedResourceVersion insertVersion, long delVersionId); +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/impl/CleanerServiceImpl.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/impl/CleanerServiceImpl.java new file mode 100644 index 0000000000..ec2ed2d758 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/impl/CleanerServiceImpl.java @@ -0,0 +1,167 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.bml.cleaner.service.impl; + +import org.apache.linkis.common.io.FsPath; +import org.apache.linkis.monitor.bml.cleaner.dao.VersionDao; +import org.apache.linkis.monitor.bml.cleaner.entity.CleanedResourceVersion; +import org.apache.linkis.monitor.bml.cleaner.entity.ResourceVersion; +import org.apache.linkis.monitor.bml.cleaner.service.CleanerService; +import org.apache.linkis.monitor.bml.cleaner.service.VersionService; +import org.apache.linkis.monitor.bml.cleaner.vo.CleanResourceVo; +import org.apache.linkis.monitor.constants.Constants; +import org.apache.linkis.storage.FSFactory; +import org.apache.linkis.storage.fs.FileSystem; +import org.apache.linkis.storage.utils.StorageConfiguration; +import org.apache.linkis.storage.utils.StorageUtils; + +import org.apache.commons.io.IOUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; + +import java.io.File; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.HashSet; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@Service +public class CleanerServiceImpl implements CleanerService { + + private final Logger logger = LoggerFactory.getLogger("CleanerServiceImpl"); + + private final SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd"); + + public static final String VERSION_FORMAT = "%06d"; + public static final String VERSION_PREFIX = "v"; + public static final String TRASH_DIR = "/trash"; + + private FileSystem fs = null; + + @Autowired private VersionDao versionDao; + + public void setVersionDao(VersionDao versionDao) { + this.versionDao = versionDao; + } + + private Set cleanedResourceIds = new HashSet(); + + Date previous; + + @Autowired VersionService versionService; + + public void clean() { + previous = + new Date( + System.currentTimeMillis() + - (Long) Constants.BML_PREVIOUS_INTERVAL_TIME_DAYS().getValue() * 86400 * 1000); + + if ((Integer) Constants.BML_VERSION_MAX_NUM().getValue() + - (Integer) Constants.BML_VERSION_KEEP_NUM().getValue() + <= 1) { + logger.error("conf error need to keep version num > 1"); + return; + } + List needCleanResources = getCleanResources(); + while (needCleanResources != null && needCleanResources.size() > 0) { + logger.info("need cleaned resource count:{}", needCleanResources.size()); + fs = + (FileSystem) + FSFactory.getFs(StorageUtils.HDFS, StorageConfiguration.HDFS_ROOT_USER.getValue()); + for (CleanResourceVo resourceVo : needCleanResources) { + String minVersion = + versionDao.getMinKeepVersion( + resourceVo.getResourceId(), + resourceVo.getMaxVersion(), + (Integer) Constants.BML_VERSION_KEEP_NUM().getValue() - 1); + List cleanVersionList = + versionDao.getCleanVersionsByResourceId(resourceVo.getResourceId(), minVersion); + // move on hdfs + for (ResourceVersion version : cleanVersionList) { + FsPath srcPath = new FsPath(version.getResource()); + // fs放到外层 + try { + fs.init(null); + if (!fs.exists(srcPath)) { + logger.error("try to move but bml source file:{} not exists!", version.getResource()); + CleanedResourceVersion cleanedResourceVersion = + CleanedResourceVersion.copyFromResourceVersion(version); + cleanedResourceVersion.setResource(""); + versionService.moveOnDb(cleanedResourceVersion, version.getId()); + continue; + } + String destPrefix = + version.getResource().substring(0, version.getResource().indexOf("/bml/") + 4); + String destPath = + destPrefix + + 
TRASH_DIR + + File.separator + + sdf.format(new Date()) + + File.separator + + version.getResourceId() + + "_" + + version.getVersion(); + FsPath dest = new FsPath(destPath); + if (!fs.exists(dest.getParent())) { + fs.mkdirs(dest.getParent()); + } + logger.info("begin to mv bml resource:{} to dest:{}", version.getResource(), destPath); + CleanedResourceVersion cleanedResourceVersion = + CleanedResourceVersion.copyFromResourceVersion(version); + cleanedResourceVersion.setResource(destPath); + versionService.doMove(fs, srcPath, dest, cleanedResourceVersion, version.getId()); + } catch (Exception e) { + logger.error("failed to mv bml resource:{}", e.getMessage(), e); + } + } + + cleanedResourceIds.add(resourceVo.getResourceId()); + } + needCleanResources = getCleanResources(); + } + } + + public void run() { + logger.info("start to clean."); + clean(); + logger.info("start to shutdown."); + shutdown(); + } + + void shutdown() { + IOUtils.closeQuietly(fs); + } + + List getCleanResources() { + List cleanResourceVoList = + versionDao.getAllNeedCleanResource( + (Integer) Constants.BML_VERSION_MAX_NUM().getValue(), + previous, + (Integer) Constants.BML_CLEAN_ONCE_RESOURCE_LIMIT_NUM().getValue()); + + return cleanResourceVoList.stream() + .filter(cleanResourceVo -> !cleanedResourceIds.contains(cleanResourceVo.getResourceId())) + .collect(Collectors.toList()); + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/impl/VersionServiceImpl.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/impl/VersionServiceImpl.java new file mode 100644 index 0000000000..be9e5b70ea --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/service/impl/VersionServiceImpl.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
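For orientation, the destination computed above parks a cleaned version under a dated trash directory beside the BML root, named resourceId_version. The same construction as a standalone sketch (the sample resource path and ids are invented purely for illustration; real values come from the resource version table):

    import java.io.File;
    import java.text.SimpleDateFormat;
    import java.util.Date;

    public class TrashPathExample {
      public static void main(String[] args) {
        // Hypothetical values, only to show the resulting layout.
        String resource = "hdfs:///apps-data/hadoop/bml/20230105/e7a2c1-0001";
        String resourceId = "e7a2c1";
        String version = "v000003";

        // keep everything up to and including "/bml", exactly as CleanerServiceImpl does
        String destPrefix = resource.substring(0, resource.indexOf("/bml/") + 4);
        String destPath =
            destPrefix
                + "/trash"
                + File.separator
                + new SimpleDateFormat("yyyyMMdd").format(new Date())
                + File.separator
                + resourceId
                + "_"
                + version;

        // prints something like hdfs:///apps-data/hadoop/bml/trash/20240101/e7a2c1_v000003
        System.out.println(destPath);
      }
    }
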
+ */ + +package org.apache.linkis.monitor.bml.cleaner.service.impl; + +import org.apache.linkis.common.io.FsPath; +import org.apache.linkis.monitor.bml.cleaner.dao.VersionDao; +import org.apache.linkis.monitor.bml.cleaner.entity.CleanedResourceVersion; +import org.apache.linkis.monitor.bml.cleaner.service.VersionService; +import org.apache.linkis.storage.fs.FileSystem; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Service; +import org.springframework.transaction.annotation.Transactional; + +import java.io.IOException; + +@Service +public class VersionServiceImpl implements VersionService { + + @Autowired VersionDao versionDao; + + public void setVersionDao(VersionDao versionDao) { + this.versionDao = versionDao; + } + + @Transactional(rollbackFor = Throwable.class) + public void doMove( + FileSystem fs, + FsPath srcPath, + FsPath destPath, + CleanedResourceVersion insertVersion, + long delVersionId) + throws IOException { + versionDao.insertCleanResourceVersion(insertVersion); + versionDao.deleteResourceVersionById(delVersionId); + fs.renameTo(srcPath, destPath); + } + + @Transactional + public void moveOnDb(CleanedResourceVersion insertVersion, long delVersionId) { + versionDao.insertCleanResourceVersion(insertVersion); + versionDao.deleteResourceVersionById(delVersionId); + } + + public String test() { + return "this a test string"; + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/vo/CleanResourceVo.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/vo/CleanResourceVo.java new file mode 100644 index 0000000000..4ef205effd --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/bml/cleaner/vo/CleanResourceVo.java @@ -0,0 +1,48 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
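Because doMove is annotated with @Transactional(rollbackFor = Throwable.class) and the HDFS rename is its last statement, an IOException thrown by renameTo rolls the insert/delete back and the version row stays where it was; moveOnDb keeps Spring's default rollback rules (unchecked exceptions only) since it touches no file system. A caller can therefore treat a thrown IOException as "nothing was moved", for example:

    try {
      versionService.doMove(fs, srcPath, destPath, cleanedVersion, versionId);
    } catch (IOException e) {
      // The transaction is rolled back together with the failed rename,
      // so the original resource version row is still intact.
      logger.error("move failed, resource version {} left untouched", versionId, e);
    }
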
+ */ + +package org.apache.linkis.monitor.bml.cleaner.vo; + +public class CleanResourceVo { + private String resourceId; + private int versionCount; + private String maxVersion; + + public String getResourceId() { + return resourceId; + } + + public void setResourceId(String resourceId) { + this.resourceId = resourceId; + } + + public int getVersionCount() { + return versionCount; + } + + public void setVersionCount(int versionCount) { + this.versionCount = versionCount; + } + + public String getMaxVersion() { + return maxVersion; + } + + public void setMaxVersion(String maxVersion) { + this.maxVersion = maxVersion; + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/config/ListenerConfig.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/config/ListenerConfig.java new file mode 100644 index 0000000000..eb5c11af87 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/config/ListenerConfig.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.config; + +import org.apache.linkis.monitor.until.HttpsUntils; +import org.apache.linkis.monitor.until.ThreadUtils; +import org.apache.linkis.monitor.utils.log.LogUtils; + +import org.springframework.context.annotation.Configuration; +import org.springframework.context.event.ContextClosedEvent; +import org.springframework.context.event.EventListener; + +import java.io.IOException; + +import org.slf4j.Logger; + +@Configuration +public class ListenerConfig { + + private static final Logger logger = LogUtils.stdOutLogger(); + + @EventListener + private void shutdownEntrance(ContextClosedEvent event) { + try { + ThreadUtils.executors.shutdown(); + HttpsUntils.client.close(); + } catch (IOException e) { + logger.error("ListenerConfig error msg {}", e.getMessage()); + } + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/config/MonitorConfig.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/config/MonitorConfig.java new file mode 100644 index 0000000000..5b4c2e269a --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/config/MonitorConfig.java @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
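CleanResourceVo is the aggregation row the cleaner loops over: one resource id together with its version count and highest version. The mapper behind VersionDao.getAllNeedCleanResource is not part of this hunk, so purely to illustrate the expected shape (the table and column names below are assumptions, and the real mapper may well be XML based rather than annotation based), a populating query could look like:

    // Hypothetical sketch only -- not the actual VersionDao mapper.
    @Select(
        "SELECT resource_id AS resourceId, COUNT(1) AS versionCount, MAX(version) AS maxVersion"
            + " FROM linkis_ps_bml_resources_version"
            + " WHERE start_time < #{previous}"
            + " GROUP BY resource_id"
            + " HAVING COUNT(1) > #{maxVersionNum}"
            + " LIMIT #{limitNum}")
    List<CleanResourceVo> getAllNeedCleanResource(
        @Param("maxVersionNum") Integer maxVersionNum,
        @Param("previous") Date previous,
        @Param("limitNum") Integer limitNum);
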
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.config; + +import org.apache.linkis.common.conf.CommonVars; +import org.apache.linkis.common.conf.Configuration; + +public class MonitorConfig { + + public static final String shellPath = Configuration.getLinkisHome() + "/admin/"; + + public static final CommonVars USER_MODE_TIMEOUT = + CommonVars.apply("linkis.monitor.user.timeOut", 300); + public static final CommonVars USER_MODE_ENGINE = + CommonVars.apply("linkis.monitor.user.enginelist", "[]"); + + public static final CommonVars ECM_TASK_MAJOR = + CommonVars.apply("linkis.monitor.ecmResourceTask.major", 0.03); + public static final CommonVars ECM_TASK_MINOR = + CommonVars.apply("linkis.monitor.ecmResourceTask.minor", 0.1); + public static final CommonVars ECM_TASK_IMURL = + CommonVars.apply("linkis.monitor.metrics.imsUrl"); + public static final CommonVars ECM_TASK_USER_AUTHKEY = + CommonVars.apply("linkis.monitor.metrics.userAuthKey"); + + public static final CommonVars JOB_HISTORY_TIME_EXCEED = + CommonVars.apply("linkis.monitor.jobhistory.id.timeExceed", 0L); + + public static final CommonVars ENTRANCE_TASK_USERTOTAL = + CommonVars.apply("linkis.monitor.entranceTask.userTotalTask", 1000); + public static final CommonVars ENTRANCE_TASK_TOTAL_MAJOR = + CommonVars.apply("linkis.monitor.entranceTask.linkisTotalTaskMajor", 50000); + public static final CommonVars ENTRANCE_TASK_TOTAL_MINOR = + CommonVars.apply("linkis.monitor.entranceTask.linkisTotalTaskMinor", 10000); + public static final CommonVars ENTRANCE_TASK_USERLIST = + CommonVars.apply("linkis.monitor.entranceTask.userlist", "[]"); + + public static final CommonVars SCHEDULED_CONFIG_NUM = + CommonVars.apply("linkis.monitor.scheduled.pool.cores.num", 10); + + public static final CommonVars SHELL_TIMEOUT = + CommonVars.apply("linkis.monitor.shell.time.out.minute", 30); + + public static final CommonVars USER_MODE_INTERFACE_TIMEOUT = + CommonVars.apply("linkis.monitor.user.mode.time.out", 30 * 1000); + + public static final CommonVars SOLUTION_URL = + CommonVars.apply( + "linkis.monitor.jobhistory.solution.url", + "https://linkis.apache.org/docs/latest/tuning-and-troubleshooting/error-guide/error-code"); + + public static final CommonVars TASK_RUNTIME_TIMEOUT_DESC = + CommonVars.apply( + "linkis.monitor.jobhistory.task.timeout.desc", + "[Linkis任务信息]您好,您在Linkis/DSS提交的任务(任务ID:{0}),已经运行超过{1}h," + + "请关注是否任务正常,如果不正常您可以到Linkis/DSS管理台进行任务的kill,集群信息为BDAP({2})。详细解决方案见Q47:{3} "); +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/config/ScheduledConfig.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/config/ScheduledConfig.java new file mode 100644 index 0000000000..e954122671 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/config/ScheduledConfig.java @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
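Every entry above is a Linkis CommonVars: the first argument to CommonVars.apply is the property key and the second is the compiled-in default, so values can be overridden through the Linkis properties files the service loads (linkis.properties being the usual place). The cron expressions referenced by the @Scheduled components later in this patch are resolved separately from linkis-et-monitor.properties via @PropertySource. Reading two of the values at runtime, with example overrides shown as comments:

    // Example overrides (not shipped defaults):
    //   linkis.monitor.scheduled.pool.cores.num=20
    //   linkis.monitor.shell.time.out.minute=60
    int poolSize = MonitorConfig.SCHEDULED_CONFIG_NUM.getValue();       // 10 unless overridden
    int shellTimeoutMinutes = MonitorConfig.SHELL_TIMEOUT.getValue();   // 30 unless overridden
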
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.config; + +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.scheduling.annotation.SchedulingConfigurer; +import org.springframework.scheduling.config.ScheduledTaskRegistrar; + +import java.util.concurrent.Executor; +import java.util.concurrent.Executors; + +@Configuration +public class ScheduledConfig implements SchedulingConfigurer { + @Bean + public Executor taskExecutor() { + return Executors.newScheduledThreadPool(MonitorConfig.SCHEDULED_CONFIG_NUM.getValue()); + } + + @Override + public void configureTasks(ScheduledTaskRegistrar scheduledTaskRegistrar) { + scheduledTaskRegistrar.setScheduler(taskExecutor()); + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/entity/EngineEntity.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/entity/EngineEntity.java new file mode 100644 index 0000000000..760c06ba1b --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/entity/EngineEntity.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
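Without this configuration Spring runs all @Scheduled methods on a single thread, so one long-running job (for instance a shell cleanup that hits its timeout) would delay every other monitor; registering a pool sized by linkis.monitor.scheduled.pool.cores.num lets the cron jobs in this module run concurrently. An equivalent setup using Spring's ThreadPoolTaskScheduler (org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler), shown only as an alternative sketch:

    // Alternative (illustrative): a TaskScheduler bean instead of wiring a raw Executor.
    @Bean
    public ThreadPoolTaskScheduler monitorTaskScheduler() {
      ThreadPoolTaskScheduler scheduler = new ThreadPoolTaskScheduler();
      scheduler.setPoolSize(MonitorConfig.SCHEDULED_CONFIG_NUM.getValue());
      scheduler.setThreadNamePrefix("et-monitor-scheduled-");
      return scheduler;
    }
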
+ */ + +package org.apache.linkis.monitor.entity; + +import java.io.Serializable; + +public class EngineEntity implements Serializable { + + private String engineType; + + private String code; + + private String runType; + + public String getEngineType() { + return engineType; + } + + public void setEngineType(String engineType) { + this.engineType = engineType; + } + + public String getCode() { + return code; + } + + public void setCode(String code) { + this.code = code; + } + + public String getRunType() { + return runType; + } + + public void setRunType(String runType) { + this.runType = runType; + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/entity/EntranceEntity.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/entity/EntranceEntity.java new file mode 100644 index 0000000000..241439b499 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/entity/EntranceEntity.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.entity; + +import java.io.Serializable; + +public class EntranceEntity implements Serializable { + + private Integer runningtasks; + + private Integer queuedtasks; + + private String alteruser; + + private String username; + + public Integer getQueuedtasks() { + return queuedtasks; + } + + public void setQueuedtasks(Integer queuedtasks) { + this.queuedtasks = queuedtasks; + } + + public String getAlteruser() { + return alteruser; + } + + public void setAlteruser(String alteruser) { + this.alteruser = alteruser; + } + + public String getUsername() { + return username; + } + + public void setUsername(String username) { + this.username = username; + } + + public Integer getRunningtasks() { + return runningtasks; + } + + public void setRunningtasks(Integer runningtasks) { + this.runningtasks = runningtasks; + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/entity/IndexEntity.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/entity/IndexEntity.java new file mode 100644 index 0000000000..2905f8aced --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/entity/IndexEntity.java @@ -0,0 +1,85 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
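EngineEntity and EntranceEntity are plain carriers for JSON configuration: linkis.monitor.user.enginelist and linkis.monitor.entranceTask.userlist both default to "[]" and appear to be deserialized with the Gson helper used elsewhere in this module. A hypothetical enginelist entry whose field names simply mirror EngineEntity (the values are invented; the fragment assumes com.google.gson.reflect.TypeToken):

    // Illustrative only; actual entries are deployment specific.
    String engineListJson =
        "[{\"engineType\":\"spark-3.2.1\",\"code\":\"show databases;\",\"runType\":\"sql\"}]";
    List<EngineEntity> engines =
        BDPJettyServerHelper.gson()
            .fromJson(engineListJson, new TypeToken<List<EngineEntity>>() {}.getType());
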
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.entity; + +import org.apache.linkis.monitor.constants.Constants; + +public class IndexEntity { + + private final String subsystemId = Constants.ALERT_SUB_SYSTEM_ID(); + private String interfaceName; + private String attrGroup; + private String attrName; + private String hostIp; + private String metricValue; + + public String getSubsystemId() { + return subsystemId; + } + + public String getInterfaceName() { + return interfaceName; + } + + public void setInterfaceName(String interfaceName) { + this.interfaceName = interfaceName; + } + + public String getAttrGroup() { + return attrGroup; + } + + public void setAttrGroup(String attrGroup) { + this.attrGroup = attrGroup; + } + + public String getAttrName() { + return attrName; + } + + public void setAttrName(String attrName) { + this.attrName = attrName; + } + + public String getHostIp() { + return hostIp; + } + + public void setHostIp(String hostIp) { + this.hostIp = hostIp; + } + + public String getMetricValue() { + return metricValue; + } + + public void setMetricValue(String metricValue) { + this.metricValue = metricValue; + } + + public IndexEntity() {} + + public IndexEntity( + String interfaceName, String attrGroup, String attrName, String hostIp, String metricValue) { + this.interfaceName = interfaceName; + this.attrGroup = attrGroup; + this.attrName = attrName; + this.hostIp = hostIp; + this.metricValue = metricValue; + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/dao/InsLabelRelationDao.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/dao/InsLabelRelationDao.java new file mode 100644 index 0000000000..d805c1a99b --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/dao/InsLabelRelationDao.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.instance.dao; + +import org.apache.linkis.monitor.instance.entity.InsPersistenceLabel; + +import java.util.List; + +public interface InsLabelRelationDao { + + /** + * Search labels + * + * @param instance instance value (http:port) + * @return + */ + List searchLabelsByInstance(String instance); + + void dropRelationsByInstance(String instance); +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/dao/InstanceInfoDao.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/dao/InstanceInfoDao.java new file mode 100644 index 0000000000..973801a37a --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/dao/InstanceInfoDao.java @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.instance.dao; + +import org.apache.linkis.common.ServiceInstance; +import org.apache.linkis.monitor.instance.entity.InstanceInfo; + +import org.apache.ibatis.annotations.Param; + +import java.util.List; + +public interface InstanceInfoDao { + + void removeInstance(ServiceInstance instance); + + List getInstanceInfoByApplicationNameAndHostnameFuzzy( + @Param("hostname") String hostname, @Param("applicationName") String applicationName); + + List getInstanceInfoByApplicationNameAndInstanceName( + @Param("instanceName") String instanceName, @Param("applicationName") String applicationName); +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/dao/InstanceLabelDao.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/dao/InstanceLabelDao.java new file mode 100644 index 0000000000..361bebf8ba --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/dao/InstanceLabelDao.java @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.instance.dao; + +import org.apache.linkis.monitor.instance.entity.InsPersistenceLabel; + +import java.util.List; + +public interface InstanceLabelDao { + /** + * Remove label + * + * @param label label entity + */ + void remove(InsPersistenceLabel label); + + /** + * Remove key -> value map from label id + * + * @param labelId + */ + void doRemoveKeyValues(Integer labelId); + + void doRemoveKeyValuesBatch(List labelIds); +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/entity/InsPersistenceLabel.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/entity/InsPersistenceLabel.java new file mode 100644 index 0000000000..0959c27988 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/entity/InsPersistenceLabel.java @@ -0,0 +1,98 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.instance.entity; + +import java.util.Date; + +public class InsPersistenceLabel { + private Integer id; + private int labelValueSize = -1; + private String stringValue; + private Boolean modifiable = false; + + private String labelKey; + + private String feature; + + private Date updateTime; + private Date createTime; + + public Boolean getModifiable() { + return modifiable; + } + + public void setModifiable(Boolean modifiable) { + this.modifiable = modifiable; + } + + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + public int getLabelValueSize() { + return labelValueSize; + } + + public void setLabelValueSize(int labelValueSize) { + this.labelValueSize = labelValueSize; + } + + public String getStringValue() { + return stringValue; + } + + public void setStringValue(String stringValue) { + this.stringValue = stringValue; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public String getLabelKey() { + return labelKey; + } + + public void setLabelKey(String labelKey) { + this.labelKey = labelKey; + } + + public String getFeature() { + return feature; + } + + public void setFeature(String feature) { + this.feature = feature; + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/entity/InsPersistenceLabelValue.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/entity/InsPersistenceLabelValue.java new file mode 100644 index 0000000000..029f552608 --- /dev/null +++ 
b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/entity/InsPersistenceLabelValue.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.instance.entity; + +public class InsPersistenceLabelValue { + + private Integer labelId; + + private String valueKey; + + private String valueContent; + + public InsPersistenceLabelValue() {} + + public InsPersistenceLabelValue(Integer labelId, String key, String content) { + this.labelId = labelId; + this.valueKey = key; + this.valueContent = content; + } + + public String getValueKey() { + return valueKey; + } + + public void setValueKey(String valueKey) { + this.valueKey = valueKey; + } + + public String getValueContent() { + return valueContent; + } + + public void setValueContent(String valueContent) { + this.valueContent = valueContent; + } + + public Integer getLabelId() { + return labelId; + } + + public void setLabelId(Integer labelId) { + this.labelId = labelId; + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/entity/InstanceInfo.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/entity/InstanceInfo.java new file mode 100644 index 0000000000..5360ffd323 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/instance/entity/InstanceInfo.java @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
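The three DAOs above, together with these label entities, cover removing a dead service instance and everything attached to it. The code that composes them is not in this hunk, so the following is only a plausible ordering of the declared calls (relations first, then label key/values, then the labels and the instance record itself), not the actual cleanup routine:

    // Hypothetical composition of the DAO methods declared above.
    void cleanInstance(
        ServiceInstance instance,
        InsLabelRelationDao relationDao,
        InstanceLabelDao labelDao,
        InstanceInfoDao infoDao) {
      List<InsPersistenceLabel> labels = relationDao.searchLabelsByInstance(instance.getInstance());
      relationDao.dropRelationsByInstance(instance.getInstance());
      for (InsPersistenceLabel label : labels) {
        labelDao.doRemoveKeyValues(label.getId());
        labelDao.remove(label);
      }
      infoDao.removeInstance(instance);
    }
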
+ */ + +package org.apache.linkis.monitor.instance.entity; + +import java.util.ArrayList; +import java.util.Date; +import java.util.List; + +public class InstanceInfo { + /** Automatic increment */ + private Integer id; + + private String applicationName; + + private String instance; + + private Date updateTime; + + private Date createTime; + /** Labels related */ + private List labels = new ArrayList<>(); + + public InstanceInfo() {} + + public InstanceInfo(String applicationName, String instance) { + this.applicationName = applicationName; + this.instance = instance; + } + + public List getLabels() { + return labels; + } + + public void setLabels(List labels) { + this.labels = labels; + } + + public Integer getId() { + return id; + } + + public void setId(Integer id) { + this.id = id; + } + + public String getApplicationName() { + return applicationName; + } + + public void setApplicationName(String applicationName) { + this.applicationName = applicationName; + } + + public String getInstance() { + return instance; + } + + public void setInstance(String instance) { + this.instance = instance; + } + + public Date getUpdateTime() { + return updateTime; + } + + public void setUpdateTime(Date updateTime) { + this.updateTime = updateTime; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/QueryUtils.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/QueryUtils.java new file mode 100644 index 0000000000..aa73471c49 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/QueryUtils.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.jobhistory; + +import java.text.DateFormat; +import java.text.SimpleDateFormat; +import java.util.Date; + +public class QueryUtils { + + private static DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); + + public static String dateToString(Date date) { + return dateFormat.format(date); + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/dao/JobHistoryMapper.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/dao/JobHistoryMapper.java new file mode 100644 index 0000000000..ebd213cfb3 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/dao/JobHistoryMapper.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
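QueryUtils keeps a single static SimpleDateFormat, which is not thread-safe, so concurrent scanner threads calling JobHistory.getUpdateTimeMills() can produce corrupted output (and a null updatedTime ends in a NullPointerException). A drop-in alternative based on the immutable DateTimeFormatter, offered only as a suggestion:

    import java.time.ZoneId;
    import java.time.format.DateTimeFormatter;
    import java.util.Date;

    public class QueryUtils {

      // DateTimeFormatter is immutable and safe to share between threads.
      private static final DateTimeFormatter DATE_FORMAT =
          DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss.SSS").withZone(ZoneId.systemDefault());

      public static String dateToString(Date date) {
        return date == null ? null : DATE_FORMAT.format(date.toInstant());
      }
    }
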
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.jobhistory.dao; + +import org.apache.linkis.monitor.jobhistory.entity.JobHistory; + +import org.apache.ibatis.annotations.Param; + +import java.util.Date; +import java.util.List; + +public interface JobHistoryMapper { + + List selectJobHistory(JobHistory jobReq); + + List search( + @Param("id") Long id, + @Param("umUser") String username, + @Param("status") List status, + @Param("startDate") Date startDate, + @Param("endDate") Date endDate, + @Param("engineType") String engineType); + + void updateIncompleteJobStatusGivenIDList( + @Param("idList") List idList, @Param("targetStatus") String targetStatus); + + void updateJobStatusForInstanceGivenStatusList( + @Param("instanceName") String instanceName, + @Param("statusList") List statusList, + @Param("targetStatus") String targetStatus, + @Param("startDate") Date startDate); + + List searchByCache( + @Param("id") Long id, + @Param("umUser") String username, + @Param("status") List status, + @Param("startDate") Date startDate, + @Param("endDate") Date endDate, + @Param("engineType") String engineType); + + List searchByCacheAndUpdateTime( + @Param("id") Long id, + @Param("umUser") String username, + @Param("status") List status, + @Param("startDate") Date startDate, + @Param("endDate") Date endDate, + @Param("engineType") String engineType); + + Long selectIdByHalfDay(@Param("id") long beginId); + + Long selectMaxId(); +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/entity/JobHistory.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/entity/JobHistory.java new file mode 100644 index 0000000000..e2499d328d --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/entity/JobHistory.java @@ -0,0 +1,221 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
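The scanner rules obtain this mapper through MapperFactory; outside that wiring, a query over one scan window looks roughly like the fragment below. The null arguments assume the underlying XML treats them as "no filter", and the status strings and the 20-minute window are examples rather than the monitor's real parameters:

    // Illustrative fragment; real callers are the jobhistory scanner rules.
    Date end = new Date();
    Date start = new Date(end.getTime() - 20 * 60 * 1000L);
    List<JobHistory> stuckJobs =
        jobHistoryMapper.search(null, null, Arrays.asList("Inited", "Running"), start, end, null);
    List<Long> ids = stuckJobs.stream().map(JobHistory::getId).collect(Collectors.toList());
    if (!ids.isEmpty()) {
      // e.g. how a dirty-data style cleanup could flip stale statuses in bulk
      jobHistoryMapper.updateIncompleteJobStatusGivenIDList(ids, "Failed");
    }
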
+ */ + +package org.apache.linkis.monitor.jobhistory.entity; + +import org.apache.linkis.monitor.jobhistory.QueryUtils; + +import java.util.Date; + +public class JobHistory { + + private Long id; + + private String jobReqId; + + private String submitUser; + + private String executeUser; + + private String source; + + private String labels; + + private String params; + + private String progress; + + private String status; + + private String logPath; + + private Integer errorCode; + + private String errorDesc; + + private Date createdTime; + + private Date updatedTime; + + private String updateTimeMills; + + private String instances; + + private String metrics; + + private String engineType; + + private String executionCode; + + private String observeInfo; + + public Long getId() { + return id; + } + + public void setId(Long id) { + this.id = id; + } + + public String getJobReqId() { + return jobReqId; + } + + public void setJobReqId(String jobReqId) { + this.jobReqId = jobReqId; + } + + public String getSubmitUser() { + return submitUser; + } + + public void setSubmitUser(String submitUser) { + this.submitUser = submitUser; + } + + public String getExecuteUser() { + return executeUser; + } + + public void setExecuteUser(String executeUser) { + this.executeUser = executeUser; + } + + public String getSource() { + return source; + } + + public void setSource(String source) { + this.source = source; + } + + public String getLabels() { + return labels; + } + + public void setLabels(String labels) { + this.labels = labels; + } + + public String getParams() { + return params; + } + + public void setParams(String params) { + this.params = params; + } + + public String getProgress() { + return progress; + } + + public void setProgress(String progress) { + this.progress = progress; + } + + public String getStatus() { + return status; + } + + public void setStatus(String status) { + this.status = status; + } + + public String getLogPath() { + return logPath; + } + + public void setLogPath(String logPath) { + this.logPath = logPath; + } + + public Integer getErrorCode() { + return errorCode; + } + + public void setErrorCode(Integer errorCode) { + this.errorCode = errorCode; + } + + public String getErrorDesc() { + return errorDesc; + } + + public void setErrorDesc(String errorDesc) { + this.errorDesc = errorDesc; + } + + public Date getCreatedTime() { + return createdTime; + } + + public void setCreatedTime(Date createdTime) { + this.createdTime = createdTime; + } + + public Date getUpdatedTime() { + return updatedTime; + } + + public void setUpdatedTime(Date updatedTime) { + this.updatedTime = updatedTime; + } + + public String getInstances() { + return instances; + } + + public void setInstances(String instances) { + this.instances = instances; + } + + public String getMetrics() { + return metrics; + } + + public void setMetrics(String metrics) { + this.metrics = metrics; + } + + public String getEngineType() { + return engineType; + } + + public void setEngineType(String engineType) { + this.engineType = engineType; + } + + public String getExecutionCode() { + return executionCode; + } + + public void setExecutionCode(String executionCode) { + this.executionCode = executionCode; + } + + public String getUpdateTimeMills() { + return QueryUtils.dateToString(getUpdatedTime()); + } + + public String getObserveInfo() { + return observeInfo; + } + + public void setObserveInfo(String observeInfo) { + this.observeInfo = observeInfo; + } +} diff --git 
a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/exception/AnomalyScannerException.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/exception/AnomalyScannerException.java new file mode 100644 index 0000000000..b2c83be013 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/exception/AnomalyScannerException.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.jobhistory.exception; + +import org.apache.linkis.common.exception.ErrorException; +import org.apache.linkis.common.exception.ExceptionLevel; + +public class AnomalyScannerException extends ErrorException { + public AnomalyScannerException(int errCode, String desc) { + super(errCode, desc); + } + + public AnomalyScannerException( + int errCode, String desc, String ip, int port, String serviceKind) { + super(errCode, desc, ip, port, serviceKind); + } + + @Override + public ExceptionLevel getLevel() { + return super.getLevel(); + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/exception/DirtyDataCleanException.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/exception/DirtyDataCleanException.java new file mode 100644 index 0000000000..b1c5e64743 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/jobhistory/exception/DirtyDataCleanException.java @@ -0,0 +1,37 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.jobhistory.exception; + +import org.apache.linkis.common.exception.ErrorException; +import org.apache.linkis.common.exception.ExceptionLevel; + +public class DirtyDataCleanException extends ErrorException { + public DirtyDataCleanException(int errCode, String desc) { + super(errCode, desc); + } + + public DirtyDataCleanException( + int errCode, String desc, String ip, int port, String serviceKind) { + super(errCode, desc, ip, port, serviceKind); + } + + @Override + public ExceptionLevel getLevel() { + return super.getLevel(); + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/BmlClear.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/BmlClear.java new file mode 100644 index 0000000000..cb7d37197f --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/BmlClear.java @@ -0,0 +1,44 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.scheduled; + +import org.apache.linkis.monitor.bml.cleaner.service.CleanerService; +import org.apache.linkis.monitor.utils.log.LogUtils; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.PropertySource; +import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.stereotype.Component; + +import org.slf4j.Logger; + +@Component +@PropertySource(value = "classpath:linkis-et-monitor.properties", encoding = "UTF-8") +public class BmlClear { + + private static final Logger logger = LogUtils.stdOutLogger(); + + @Autowired private CleanerService cleanerServices; + + @Scheduled(cron = "${linkis.monitor.bml.clear.history.version.cron}") + public void jobHistoryScanTask() { + logger.info("start to clear bml history version"); + cleanerServices.run(); + logger.info("end to clear bml history version"); + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/EcRecordClear.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/EcRecordClear.java new file mode 100644 index 0000000000..ddb3d3e8c8 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/EcRecordClear.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
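BmlClear relies on linkis.monitor.bml.clear.history.version.cron having a value in linkis-et-monitor.properties; there is no default in code, so a missing key would fail placeholder resolution when the bean is created. A typical setting would be something like linkis.monitor.bml.clear.history.version.cron=0 0 2 * * ? to run the BML version cleanup once a day at 02:00 (the value shown is an example, not a shipped default).
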
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.scheduled; + +import org.apache.linkis.monitor.config.MonitorConfig; +import org.apache.linkis.monitor.until.ThreadUtils; +import org.apache.linkis.monitor.utils.log.LogUtils; + +import org.springframework.context.annotation.PropertySource; +import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.stereotype.Component; + +import java.util.ArrayList; +import java.util.List; + +import org.slf4j.Logger; + +/** * Task: clean up linkis_cg_ec_resource_info_record data */ +@Component +@PropertySource(value = "classpath:linkis-et-monitor.properties", encoding = "UTF-8") +public class EcRecordClear { + + private static final Logger logger = LogUtils.stdOutLogger(); + + @Scheduled(cron = "${linkis.monitor.clear.ecRecord.cron}") + public void ecRecordClear() { + logger.info("Start to clear_ec_record shell"); + List cmdlist = new ArrayList<>(); + cmdlist.add("sh"); + cmdlist.add(MonitorConfig.shellPath + "clear_ec_record.sh"); + logger.info("clear_ec_record shell command {}", cmdlist); + String exec = ThreadUtils.run(cmdlist, "clear_ec_record.sh"); + logger.info("shell log {}", exec); + logger.info("End to clear_ec_record shell "); + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/EntranceTaskMonitor.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/EntranceTaskMonitor.java new file mode 100644 index 0000000000..a6e7879f07 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/EntranceTaskMonitor.java @@ -0,0 +1,195 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
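Since MonitorConfig.shellPath is Configuration.getLinkisHome() + "/admin/", this task effectively runs sh $LINKIS_HOME/admin/clear_ec_record.sh, i.e. the cleanup script added earlier in this patch; ThreadUtils.run presumably bounds the call with linkis.monitor.shell.time.out.minute, though the ThreadUtils class itself is not shown here. As with the other jobs, the schedule comes from linkis-et-monitor.properties, for example linkis.monitor.clear.ecRecord.cron=0 0 3 * * ? (an example value, not a shipped default).
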
+ */ + +package org.apache.linkis.monitor.scheduled; + +import org.apache.linkis.common.ServiceInstance; +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.governance.common.conf.GovernanceCommonConf; +import org.apache.linkis.monitor.config.MonitorConfig; +import org.apache.linkis.monitor.constants.Constants; +import org.apache.linkis.monitor.entity.IndexEntity; +import org.apache.linkis.monitor.until.HttpsUntils; +import org.apache.linkis.monitor.utils.alert.AlertDesc; +import org.apache.linkis.monitor.utils.alert.ims.MonitorAlertUtils; +import org.apache.linkis.monitor.utils.alert.ims.PooledImsAlertUtils; +import org.apache.linkis.rpc.Sender; +import org.apache.linkis.server.BDPJettyServerHelper; + +import org.apache.commons.collections.MapUtils; +import org.apache.commons.lang3.StringUtils; + +import org.springframework.context.annotation.PropertySource; +import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.stereotype.Component; + +import java.io.IOException; +import java.math.BigDecimal; +import java.util.*; + +import com.google.gson.internal.LinkedTreeMap; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * * Entrance monitors the number of tasks for specified users and systems. If the configured + * threshold is exceeded, an alarm will be triggered. + */ +@Component +@PropertySource(value = "classpath:linkis-et-monitor.properties", encoding = "UTF-8") +public class EntranceTaskMonitor { + + private static final Logger logger = LoggerFactory.getLogger(EntranceTaskMonitor.class); + + private static final String ENTRANCE_RUNNING_TASK = "entrance_running_task"; + private static final String ENTRANCE_QUEUED_TASK = "entrance_queued_task"; + + @Scheduled(cron = "${linkis.monitor.entrance.task.cron}") + public void entranceTask() throws IOException { + List> userlist = new ArrayList<>(); + String value = MonitorConfig.ENTRANCE_TASK_USERLIST.getValue(); + if (StringUtils.isNotBlank(value)) { + userlist = BDPJettyServerHelper.gson().fromJson(value, ArrayList.class); + } + + userlist.forEach( + entranceEntity -> { + Map data = new HashMap<>(); + try { + data = + MapUtils.getMap( + HttpsUntils.getEntranceTask(null, entranceEntity.get("username"), null), + "data"); + logger.info("TaskMonitor userlist response {}:", data); + } catch (IOException e) { + logger.warn("failed to get EntranceTask data"); + } + + int runningNumber = + null != entranceEntity.get("runningtasks") + ? Integer.parseInt(entranceEntity.get("runningtasks")) + : 0; + int queuedNumber = + null != entranceEntity.get("queuedtasks") + ? 
Integer.parseInt(entranceEntity.get("queuedtasks")) + : 0; + + BigDecimal runningtotal = new BigDecimal((int) data.get("runningNumber")); + BigDecimal queuedtotal = new BigDecimal((int) data.get("queuedNumber")); + BigDecimal total = runningtotal.add(queuedtotal); + HashMap parms = new HashMap<>(); + parms.put("$username", entranceEntity.get("username")); + parms.put("$alteruser", entranceEntity.get("alteruser")); + parms.put("$url", Configuration.GATEWAY_URL().getValue()); + // 获取标准阈值 + if (runningtotal.intValue() > runningNumber) { + // 触发告警 用户运行任务满 + parms.put("$runningtask", String.valueOf(runningNumber)); + Map ecmResourceAlerts = + MonitorAlertUtils.getAlerts(Constants.ALERT_RESOURCE_MONITOR(), parms); + PooledImsAlertUtils.addAlert(ecmResourceAlerts.get("12005")); + } + if (queuedtotal.intValue() > queuedNumber) { + // 触发告警 用户排队任务满 + parms.put("$queuedtask", String.valueOf(queuedNumber)); + Map ecmResourceAlerts = + MonitorAlertUtils.getAlerts(Constants.ALERT_RESOURCE_MONITOR(), parms); + PooledImsAlertUtils.addAlert(ecmResourceAlerts.get("12006")); + } + int usertotalTask = MonitorConfig.ENTRANCE_TASK_USERTOTAL.getValue(); + if (total.intValue() > usertotalTask) { + parms.put("$tasktotal", String.valueOf(usertotalTask)); + Map ecmResourceAlerts = + MonitorAlertUtils.getAlerts(Constants.ALERT_RESOURCE_MONITOR(), parms); + PooledImsAlertUtils.addAlert(ecmResourceAlerts.get("12007")); + } + }); + Map likisData = null; + try { + likisData = MapUtils.getMap(HttpsUntils.getEntranceTask(null, "hadoop", null), "data"); + logger.info("TaskMonitor hadoop response {}:", likisData); + } catch (IOException e) { + logger.warn("failed to get EntranceTask data"); + } + // 系统监控 + BigDecimal runningNumber = new BigDecimal((int) likisData.get("runningNumber")); + BigDecimal queuedNumber = new BigDecimal((int) likisData.get("queuedNumber")); + BigDecimal total = runningNumber.add(queuedNumber); + + HashMap parms = new HashMap<>(); + parms.put("$url", Configuration.GATEWAY_URL().getValue()); + int linkisTotalMajor = MonitorConfig.ENTRANCE_TASK_TOTAL_MAJOR.getValue(); + int linkisTotalMinor = MonitorConfig.ENTRANCE_TASK_TOTAL_MINOR.getValue(); + if (total.intValue() >= linkisTotalMajor) { + // 触发告警Major + parms.put("$taskmajor", String.valueOf(linkisTotalMajor)); + logger.info("TaskMonitor parms {}:", parms); + Map ecmResourceAlerts = + MonitorAlertUtils.getAlerts(Constants.ALERT_RESOURCE_MONITOR(), parms); + PooledImsAlertUtils.addAlert(ecmResourceAlerts.get("12009")); + + } else if (total.intValue() >= linkisTotalMinor) { + parms.put("$taskminor", String.valueOf(linkisTotalMinor)); + logger.info("TaskMonitor parms {}:", parms); + Map ecmResourceAlerts = + MonitorAlertUtils.getAlerts(Constants.ALERT_RESOURCE_MONITOR(), parms); + PooledImsAlertUtils.addAlert(ecmResourceAlerts.get("12008")); + } + resourceSendToIms(); + } + + public static void resourceSendToIms() { + ServiceInstance[] instances = + Sender.getInstances(GovernanceCommonConf.ENTRANCE_SERVICE_NAME().getValue()); + if (null != instances) { + for (ServiceInstance instance : instances) { + String serviceInstance = instance.getInstance(); + try { + Map instanceData = + MapUtils.getMap(HttpsUntils.getEntranceTask(null, "hadoop", serviceInstance), "data"); + int runningNumber = 0; + int queuedNumber = 0; + if (instanceData.containsKey("runningNumber")) { + runningNumber = (int) instanceData.get("runningNumber"); + } + if (instanceData.containsKey("queuedNumber")) { + queuedNumber = (int) instanceData.get("queuedNumber"); + } + 
logger.info("ResourceMonitor send index "); + List list = new ArrayList<>(); + list.add( + new IndexEntity( + serviceInstance, + "entrance", + ENTRANCE_RUNNING_TASK, + HttpsUntils.localHost, + String.valueOf(runningNumber))); + list.add( + new IndexEntity( + serviceInstance, + "entrance", + ENTRANCE_QUEUED_TASK, + HttpsUntils.localHost, + String.valueOf(queuedNumber))); + HttpsUntils.sendIndex(list); + } catch (IOException e) { + logger.warn("failed to send EcmResource index :" + e); + } + } + } + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/JobHistoryClear.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/JobHistoryClear.java new file mode 100644 index 0000000000..346ca9cb06 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/JobHistoryClear.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.scheduled; + +import org.apache.linkis.monitor.config.MonitorConfig; +import org.apache.linkis.monitor.until.ThreadUtils; +import org.apache.linkis.monitor.utils.log.LogUtils; + +import org.springframework.context.annotation.PropertySource; +import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.stereotype.Component; + +import java.util.ArrayList; +import java.util.List; + +import org.slf4j.Logger; + +/** * Task: clean up linkis_ps_job_history_group_history data */ +@Component +@PropertySource(value = "classpath:linkis-et-monitor.properties", encoding = "UTF-8") +public class JobHistoryClear { + + private static final Logger logger = LogUtils.stdOutLogger(); + + @Scheduled(cron = "${linkis.monitor.clear.history.task.cron}") + public void historyTaskClear() { + logger.info("Start to clear_history_task shell"); + List cmdlist = new ArrayList<>(); + cmdlist.add("sh"); + cmdlist.add(MonitorConfig.shellPath + "clear_history_task.sh"); + logger.info("clear_history_task shell command {}", cmdlist); + String exec = ThreadUtils.run(cmdlist, "clear_history_task.sh"); + logger.info("shell log {}", exec); + logger.info("End to clear_history_task shell "); + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/JobHistoryMonitor.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/JobHistoryMonitor.java new file mode 100644 index 0000000000..c4bd65a90b --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/JobHistoryMonitor.java @@ -0,0 +1,265 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.scheduled; + +import org.apache.linkis.monitor.config.MonitorConfig; +import org.apache.linkis.monitor.constants.Constants; +import org.apache.linkis.monitor.core.pac.DataFetcher; +import org.apache.linkis.monitor.core.scanner.AnomalyScanner; +import org.apache.linkis.monitor.core.scanner.DefaultScanner; +import org.apache.linkis.monitor.factory.MapperFactory; +import org.apache.linkis.monitor.jobhistory.JobHistoryDataFetcher; +import org.apache.linkis.monitor.jobhistory.errorcode.JobHistoryErrCodeRule; +import org.apache.linkis.monitor.jobhistory.errorcode.JobHistoryErrorCodeAlertSender; +import org.apache.linkis.monitor.jobhistory.jobtime.JobTimeExceedAlertSender; +import org.apache.linkis.monitor.jobhistory.jobtime.JobTimeExceedRule; +import org.apache.linkis.monitor.jobhistory.labels.JobHistoryLabelsAlertSender; +import org.apache.linkis.monitor.jobhistory.labels.JobHistoryLabelsRule; +import org.apache.linkis.monitor.jobhistory.runtime.CommonJobRunTimeRule; +import org.apache.linkis.monitor.jobhistory.runtime.CommonRunTimeAlertSender; +import org.apache.linkis.monitor.jobhistory.runtime.JobHistoryRunTimeAlertSender; +import org.apache.linkis.monitor.jobhistory.runtime.JobHistoryRunTimeRule; +import org.apache.linkis.monitor.until.CacheUtils; +import org.apache.linkis.monitor.utils.alert.AlertDesc; +import org.apache.linkis.monitor.utils.alert.ims.ImsAlertDesc; +import org.apache.linkis.monitor.utils.alert.ims.MonitorAlertUtils; +import org.apache.linkis.monitor.utils.alert.ims.UserLabelAlertUtils; +import org.apache.linkis.monitor.utils.log.LogUtils; + +import org.springframework.context.annotation.PropertySource; +import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.stereotype.Component; + +import java.util.*; + +import org.slf4j.Logger; + +/** + * * jobHistory monitor 1.errorCode: Monitor the error code 2.userLabel: tenant label + * monitoring,scan the execution data within the first 20 minutes, and judge the labels field of the + * data + * + *

3.jobResultRunTime: Scan the execution data within the last 20 minutes and check the
+ * completed tasks. An alarm is triggered only when all three conditions hold: the job has ended,
+ * the parm field in jobhistory contains (task.notification.conditions), and the job result is one
+ * of (Succeed, Failed, Canceled, Timeout, ALL).
+ *
+ *

4.jobResultRunTimeForDSS: Scan the execution data within the last 20 minutes for tasks that have
+ * been marked for notification; once such a task has ended, a notification is sent.
+ *
+ *

5.jobHistoryUnfinishedScan: monitor the status of the execution task, scan the data outside 12 + * hours and within 24 hours + */ +@Component +@PropertySource(value = "classpath:linkis-et-monitor.properties", encoding = "UTF-8") +public class JobHistoryMonitor { + + private static final Logger logger = LogUtils.stdOutLogger(); + private static final long backtrackNum = 1000000L; + + @Scheduled(cron = "${linkis.monitor.jobHistory.finished.cron}") + public void jobHistoryFinishedScan() { + long intervalMs = 20 * 60 * 1000; + long maxIntervalMs = Constants.ERRORCODE_MAX_INTERVALS_SECONDS() * 1000; + long endTime = System.currentTimeMillis(); + long startTime = endTime - intervalMs; + long realIntervals = Math.min(endTime - startTime, maxIntervalMs); + AnomalyScanner scanner = new DefaultScanner(); + boolean shouldStart = false; + long id; + if (null == CacheUtils.cacheBuilder.getIfPresent("jobHistoryId")) { + long maxId = MapperFactory.getJobHistoryMapper().selectMaxId(); + long beginId = 0L; + if (maxId > backtrackNum) { + beginId = maxId - backtrackNum; + } + id = MapperFactory.getJobHistoryMapper().selectIdByHalfDay(beginId); + CacheUtils.cacheBuilder.put("jobHistoryId", id); + } else { + id = CacheUtils.cacheBuilder.getIfPresent("jobHistoryId"); + } + List fetchers = generateFetchersfortime(startTime, endTime, id, "updated_time"); + if (fetchers.isEmpty()) { + logger.warn("generated 0 dataFetchers, plz check input"); + return; + } + // errorCode + try { + Map errorCodeAlerts = + MonitorAlertUtils.getAlerts(Constants.SCAN_PREFIX_ERRORCODE(), null); + + if (errorCodeAlerts == null || errorCodeAlerts.size() == 0) { + logger.info("[INFO] Loaded 0 errorcode alert from alert-rule properties file."); + } else { + logger.info( + "[INFO] Loaded {} errorcode alert from alert-rules properties file.", + errorCodeAlerts.size()); + shouldStart = true; + addIntervalToImsAlerts(errorCodeAlerts, realIntervals); + JobHistoryErrCodeRule jobHistoryErrCodeRule = + new JobHistoryErrCodeRule( + errorCodeAlerts.keySet(), new JobHistoryErrorCodeAlertSender(errorCodeAlerts)); + scanner.addScanRule(jobHistoryErrCodeRule); + } + } catch (Exception e) { + logger.warn("Jobhistory Monitor ErrorCode Faily: " + e.getMessage()); + } + // userLabel + try { + Map userLabelAlerts = + UserLabelAlertUtils.getAlerts(Constants.USER_LABEL_MONITOR(), ""); + if (userLabelAlerts == null || userLabelAlerts.size() == 0) { + logger.info("[INFO] Loaded 0 alerts userLabel alert-rule from alert properties file."); + } else { + logger.info( + "[INFO] Loaded {} alerts userLabel alert-rules from alert properties file.", + userLabelAlerts.size()); + shouldStart = true; + JobHistoryLabelsRule jobHistoryLabelsRule = + new JobHistoryLabelsRule(new JobHistoryLabelsAlertSender()); + scanner.addScanRule(jobHistoryLabelsRule); + } + } catch (Exception e) { + logger.warn("Jobhistory Monitor UserLabel Faily: " + e.getMessage()); + } + // jobResultRunTime + try { + Map jobResultAlerts = + MonitorAlertUtils.getAlerts((Constants.SCAN_PREFIX_ERRORCODE()), null); + if (jobResultAlerts == null || jobResultAlerts.size() == 0) { + logger.info("[INFO] Loaded 0 jobResult alert from alert-rule properties file."); + } else { + logger.info( + "[INFO] Loaded {} alerts jobResult alert-rules from alert properties file.", + jobResultAlerts.size()); + shouldStart = true; + JobHistoryRunTimeRule jobHistoryRunTimeRule = + new JobHistoryRunTimeRule(new JobHistoryRunTimeAlertSender()); + scanner.addScanRule(jobHistoryRunTimeRule); + } + } catch (Exception e) { + 
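+ // Only log the failure so the remaining scan rules can still be registered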
logger.warn("Jobhistory Monitor JobResultRunTime Faily: " + e.getMessage()); + } + // jobResultRunTimeForDSS + try { + Map dssJobResultAlerts = + MonitorAlertUtils.getAlerts((Constants.SCAN_PREFIX_ERRORCODE()), null); + if (dssJobResultAlerts == null || dssJobResultAlerts.size() == 0) { + logger.info("[INFO] Loaded 0 jobResult alert from alert-rule properties file."); + } else { + logger.info( + "[INFO] Loaded {} alerts jobResult alert-rules from alert properties file.", + dssJobResultAlerts.size()); + shouldStart = true; + + CommonJobRunTimeRule commonJobRunTimeRule = + new CommonJobRunTimeRule(new CommonRunTimeAlertSender()); + scanner.addScanRule(commonJobRunTimeRule); + } + } catch (Exception e) { + logger.warn("Jobhistory JobResultRunTimeForDSS ErrorCode Faily: " + e.getMessage()); + } + run(scanner, fetchers, shouldStart); + } + + @Scheduled(cron = "${linkis.monitor.jobHistory.timeout.cron}") + public void jobHistoryUnfinishedScan() { + long id = + Optional.ofNullable(CacheUtils.cacheBuilder.getIfPresent("jobhistoryScan")) + .orElse(MonitorConfig.JOB_HISTORY_TIME_EXCEED.getValue()); + long intervalMs = Constants.TIMEOUT_INTERVALS_SECONDS() * 1000; + long maxIntervalMs = Constants.ERRORCODE_MAX_INTERVALS_SECONDS() * 1000; + long endTime = System.currentTimeMillis(); + long startTime = endTime - intervalMs; + long realIntervals = Math.min(endTime - startTime, maxIntervalMs); + AnomalyScanner scanner = new DefaultScanner(); + boolean shouldStart = false; + List fetchers = + generateFetchers(startTime, endTime, maxIntervalMs, id, "created_time"); + if (fetchers.isEmpty()) { + logger.warn("generated 0 dataFetchers, plz check input"); + return; + } + Map jobTimeAlerts = + MonitorAlertUtils.getAlerts((Constants.SCAN_PREFIX_UNFINISHED_JOBTIME_EXCEED_SEC()), null); + if (jobTimeAlerts == null || jobTimeAlerts.size() == 0) { + logger.info("[INFO] Loaded 0 alerts jobtime alert-rule from alert properties file."); + } else { + logger.info( + "[INFO] Loaded {} alerts jobtime alert-rules from alert properties file.", + jobTimeAlerts.size()); + shouldStart = true; + addIntervalToImsAlerts(jobTimeAlerts, realIntervals); + JobTimeExceedRule jobTimeExceedRule = + new JobTimeExceedRule( + jobTimeAlerts.keySet(), new JobTimeExceedAlertSender(jobTimeAlerts)); + scanner.addScanRule(jobTimeExceedRule); + } + run(scanner, fetchers, shouldStart); + } + + public static void run(AnomalyScanner scanner, List fetchers, Boolean shouldStart) { + if (shouldStart) { + scanner.addDataFetchers(fetchers); + scanner.run(); + } + } + + private static List generateFetchers( + long startTime, long endTime, long maxIntervalMs, long id, String timeType) { + List ret = new ArrayList<>(); + long pe = endTime; + long ps; + while (pe > startTime) { + ps = Math.max(pe - maxIntervalMs, startTime); + String[] fetcherArgs = + new String[] {String.valueOf(ps), String.valueOf(pe), String.valueOf(id), timeType}; + ret.add(new JobHistoryDataFetcher(fetcherArgs, MapperFactory.getJobHistoryMapper())); + logger.info( + "Generated dataFetcher for startTime: " + new Date(ps) + ". 
EndTime: " + new Date(pe)); + pe = pe - maxIntervalMs; + } + return ret; + } + + private static List generateFetchersfortime( + long startTime, long endTime, long id, String timeType) { + List fetchers = new ArrayList<>(); + String[] fetcherArgs = + new String[] { + String.valueOf(startTime), String.valueOf(endTime), String.valueOf(id), timeType + }; + fetchers.add(new JobHistoryDataFetcher(fetcherArgs, MapperFactory.getJobHistoryMapper())); + logger.info( + "Generated dataFetcher for startTime: " + + new Date(startTime) + + ". EndTime: " + + new Date(endTime)); + return fetchers; + } + + private static void addIntervalToImsAlerts(Map alerts, long realIntervals) { + for (AlertDesc alert : alerts.values()) { + if (!(alert instanceof ImsAlertDesc)) { + logger.info("[warn] ignore wrong alert" + alert); + } else { + ((ImsAlertDesc) alert).hitIntervalMs_$eq(realIntervals); + } + } + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/ResourceMonitor.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/ResourceMonitor.java new file mode 100644 index 0000000000..ac3046501c --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/ResourceMonitor.java @@ -0,0 +1,162 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.scheduled; + +import org.apache.linkis.common.utils.ByteTimeUtils; +import org.apache.linkis.monitor.config.MonitorConfig; +import org.apache.linkis.monitor.constants.Constants; +import org.apache.linkis.monitor.entity.IndexEntity; +import org.apache.linkis.monitor.until.HttpsUntils; +import org.apache.linkis.monitor.utils.alert.AlertDesc; +import org.apache.linkis.monitor.utils.alert.ims.MonitorAlertUtils; +import org.apache.linkis.monitor.utils.alert.ims.PooledImsAlertUtils; + +import org.apache.commons.collections.MapUtils; +import org.apache.commons.lang3.StringUtils; + +import org.springframework.context.annotation.PropertySource; +import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.stereotype.Component; + +import java.io.IOException; +import java.math.BigDecimal; +import java.util.*; +import java.util.concurrent.atomic.AtomicReference; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** * Monitor the usage of ECM resources for monitoring and metrics reporting */ +@Component +@PropertySource(value = "classpath:linkis-et-monitor.properties", encoding = "UTF-8") +public class ResourceMonitor { + + private static final Logger logger = LoggerFactory.getLogger(ResourceMonitor.class); + + @Scheduled(cron = "${linkis.monitor.ecm.resource.cron}") + public void ecmResourceTask() { + Map resultmap = null; + AtomicReference tenant = new AtomicReference<>("租户标签:公共资源"); + AtomicReference totalMemory = new AtomicReference<>(0.0); + AtomicReference totalInstance = new AtomicReference<>(0.0); + AtomicReference totalCores = new AtomicReference<>(0.0); + try { + resultmap = HttpsUntils.sendHttp(null, null); + logger.info("ResourceMonitor response {}:", resultmap); + } catch (IOException e) { + logger.warn("failed to get EcmResource data"); + } + // got interface data + Map>> data = MapUtils.getMap(resultmap, "data"); + List> emNodeVoList = data.getOrDefault("EMs", new ArrayList<>()); + StringJoiner minor = new StringJoiner(","); + StringJoiner major = new StringJoiner(","); + // deal ecm resource + emNodeVoList.forEach( + emNodeVo -> { + Map leftResource = MapUtils.getMap(emNodeVo, "leftResource"); + Map maxResource = MapUtils.getMap(emNodeVo, "maxResource"); + // 新增 ECM资源告警,需补充此ECM所属租户 + List> labels = (List>) emNodeVo.get("labels"); + labels.stream() + .filter(labelmap -> labelmap.containsKey("tenant")) + .forEach(map -> tenant.set("租户标签:" + map.get("stringValue").toString())); + String leftmemory = + ByteTimeUtils.bytesToString((long) leftResource.getOrDefault("memory", 0)); + String maxmemory = + ByteTimeUtils.bytesToString((long) maxResource.getOrDefault("memory", 0)); + + String leftmemoryStr = leftmemory.split(" ")[0]; + String maxmemoryStr = maxmemory.split(" ")[0]; + + BigDecimal leftMemory = new BigDecimal(leftmemoryStr); + BigDecimal leftCores = new BigDecimal((int) leftResource.get("cores")); + BigDecimal leftInstance = new BigDecimal((int) leftResource.get("instance")); + totalMemory.set(totalMemory.get() + leftMemory.doubleValue()); + totalInstance.set(totalInstance.get() + leftInstance.doubleValue()); + totalCores.set(totalCores.get() + leftCores.doubleValue()); + + BigDecimal maxMemory = new BigDecimal(maxmemoryStr); + BigDecimal maxCores = new BigDecimal((int) maxResource.get("cores")); + BigDecimal maxInstance = new BigDecimal((int) maxResource.get("instance")); + double memorydouble = + leftMemory.divide(maxMemory, 2, BigDecimal.ROUND_HALF_DOWN).doubleValue(); + double coresdouble = + 
leftCores.divide(maxCores, 2, BigDecimal.ROUND_HALF_DOWN).doubleValue(); + double instancedouble = + leftInstance.divide(maxInstance, 2, BigDecimal.ROUND_HALF_DOWN).doubleValue(); + Double majorValue = MonitorConfig.ECM_TASK_MAJOR.getValue(); + Double minorValue = MonitorConfig.ECM_TASK_MINOR.getValue(); + if (((memorydouble) <= majorValue) + || ((coresdouble) <= majorValue) + || ((instancedouble) <= majorValue)) { + major.add(emNodeVo.get("instance").toString()); + } else if (((memorydouble) < minorValue) + || ((coresdouble) < minorValue) + || ((instancedouble) < minorValue)) { + minor.add(emNodeVo.get("instance").toString()); + } + HashMap replaceParm = new HashMap<>(); + replaceParm.put("$tenant", tenant.get()); + if (StringUtils.isNotBlank(major.toString())) { + replaceParm.put("$instance", major.toString()); + replaceParm.put("$ratio", majorValue.toString()); + Map ecmResourceAlerts = + MonitorAlertUtils.getAlerts(Constants.ALERT_RESOURCE_MONITOR(), replaceParm); + PooledImsAlertUtils.addAlert(ecmResourceAlerts.get("12004")); + } + if (StringUtils.isNotBlank(minor.toString())) { + replaceParm.put("$instance", minor.toString()); + replaceParm.put("$ratio", minorValue.toString()); + Map ecmResourceAlerts = + MonitorAlertUtils.getAlerts(Constants.ALERT_RESOURCE_MONITOR(), replaceParm); + PooledImsAlertUtils.addAlert(ecmResourceAlerts.get("12003")); + } + resourceSendToIms( + coresdouble, memorydouble, instancedouble, HttpsUntils.localHost, "USED"); + }); + resourceSendToIms( + totalCores.get(), totalMemory.get(), totalInstance.get(), HttpsUntils.localHost, "TOTAL"); + } + + private void resourceSendToIms( + Double coresdouble, + Double memorydouble, + Double instancedouble, + String loaclhost, + String name) { + List list = new ArrayList<>(); + logger.info("ResourceMonitor send index "); + String core = "ECM_CPU_"; + String memory = "ECM_MEMORY_"; + String instance = "ECM_INSTANCE_"; + list.add( + new IndexEntity(core.concat(name), "CPU", "INDEX", loaclhost, String.valueOf(coresdouble))); + list.add( + new IndexEntity( + memory.concat(name), "MEMORY", "INDEX", loaclhost, String.valueOf(memorydouble))); + list.add( + new IndexEntity( + instance.concat(name), "INSTANCE", "INDEX", loaclhost, String.valueOf(instancedouble))); + try { + HttpsUntils.sendIndex(list); + } catch (IOException e) { + logger.warn("failed to send EcmResource index"); + } + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/TaskLogClear.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/TaskLogClear.java new file mode 100644 index 0000000000..6def756982 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/TaskLogClear.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.scheduled; + +import org.apache.linkis.monitor.config.MonitorConfig; +import org.apache.linkis.monitor.until.ThreadUtils; +import org.apache.linkis.monitor.utils.log.LogUtils; + +import org.springframework.context.annotation.PropertySource; +import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.stereotype.Component; + +import java.util.ArrayList; +import java.util.List; + +import org.slf4j.Logger; + +/** * Task: clean up logs, file data of ec materials */ +@Component +@PropertySource(value = "classpath:linkis-et-monitor.properties", encoding = "UTF-8") +public class TaskLogClear { + + private static final Logger logger = LogUtils.stdOutLogger(); + + @Scheduled(cron = "${linkis.monitor.clear.taskLog.cron}") + public void taskLogClear() { + logger.info("Start to linkis_task_res_log_clear shell"); + List cmdlist = new ArrayList<>(); + cmdlist.add("sh"); + cmdlist.add(MonitorConfig.shellPath + "linkis_task_res_log_clear.sh"); + logger.info("linkis_task_res_log_clear shell command {}", cmdlist); + String exec = ThreadUtils.run(cmdlist, "linkis_task_res_log_clear.sh"); + logger.info("shell log {}", exec); + logger.info("End to linkis_task_res_log_clear shell "); + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/UserModeMonitor.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/UserModeMonitor.java new file mode 100644 index 0000000000..ad6f861479 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/scheduled/UserModeMonitor.java @@ -0,0 +1,177 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.scheduled; + +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.common.utils.Utils; +import org.apache.linkis.governance.common.entity.task.RequestPersistTask; +import org.apache.linkis.httpclient.dws.config.DWSClientConfig; +import org.apache.linkis.manager.label.constant.LabelKeyConstant; +import org.apache.linkis.monitor.config.MonitorConfig; +import org.apache.linkis.monitor.constants.Constants; +import org.apache.linkis.monitor.until.HttpsUntils; +import org.apache.linkis.monitor.utils.alert.AlertDesc; +import org.apache.linkis.monitor.utils.alert.ims.MonitorAlertUtils; +import org.apache.linkis.monitor.utils.alert.ims.PooledImsAlertUtils; +import org.apache.linkis.server.BDPJettyServerHelper; +import org.apache.linkis.ujes.client.UJESClient; +import org.apache.linkis.ujes.client.UJESClientImpl; +import org.apache.linkis.ujes.client.request.GetTableStatisticInfoAction; +import org.apache.linkis.ujes.client.request.JobSubmitAction; +import org.apache.linkis.ujes.client.response.GetTableStatisticInfoResult; +import org.apache.linkis.ujes.client.response.JobExecuteResult; +import org.apache.linkis.ujes.client.response.JobInfoResult; + +import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.stereotype.Component; + +import java.net.SocketTimeoutException; +import java.util.*; +import java.util.concurrent.TimeUnit; + +import com.google.gson.internal.LinkedTreeMap; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * * User mode monitoring: regularly trigger scripts to monitor whether the engine status is running + * normally + */ +@Component +public class UserModeMonitor { + + private static final Logger logger = LoggerFactory.getLogger(UserModeMonitor.class); + + private static final DWSClientConfig clientConfig = HttpsUntils.dwsClientConfig; + + private static final UJESClient client = new UJESClientImpl(clientConfig); + + @Scheduled(cron = "${linkis.monitor.user.mode.cron}") + public void job() { + Optional.ofNullable(MonitorConfig.USER_MODE_ENGINE.getValue()) + .ifPresent( + configStr -> { + ArrayList> userModeStr = + BDPJettyServerHelper.gson().fromJson(configStr, ArrayList.class); + userModeStr.forEach( + engine -> { + // 3. 
build job and execute + JobExecuteResult jobExecuteResult = toSubmit(engine); + logger.info( + "start run engineType: {},job id : {}", + engine.get("engineType"), + jobExecuteResult.taskID()); + HashMap parms = new HashMap<>(); + parms.put("$engineType", engine.get("engineType")); + parms.put("$url", Configuration.GATEWAY_URL().getValue()); + parms.put("$jobId", jobExecuteResult.taskID()); + Utils.sleepQuietly(MonitorConfig.USER_MODE_TIMEOUT.getValue() * 1000); + JobInfoResult jobInfo = client.getJobInfo(jobExecuteResult); + if (jobInfo.isCompleted()) { + if (jobInfo.getJobStatus().equals("Failed")) { + logger.info( + "run fail engineType: {},job id : {}", + engine.get("engineType"), + jobExecuteResult.taskID()); + RequestPersistTask requestPersistTask = jobInfo.getRequestPersistTask(); + parms.put("$errorCode", String.valueOf(requestPersistTask.getErrCode())); + parms.put("$errorMsg", requestPersistTask.getErrDesc()); + Map failedAlerts = + MonitorAlertUtils.getAlerts(Constants.USER_RESOURCE_MONITOR(), parms); + PooledImsAlertUtils.addAlert(failedAlerts.get("12012")); + } + } else { + logger.info( + "run timeout engineType: {},job id : {}", + engine.get("engineType"), + jobExecuteResult.taskID()); + Map alerts = + MonitorAlertUtils.getAlerts(Constants.USER_RESOURCE_MONITOR(), parms); + PooledImsAlertUtils.addAlert(alerts.get("12011")); + } + }); + }); + } + + private static JobExecuteResult toSubmit(LinkedTreeMap engine) { + // 1. build params + // set label map :EngineTypeLabel/UserCreatorLabel/EngineRunTypeLabel/Tenant + Map labels = new HashMap(); + labels.put( + LabelKeyConstant.ENGINE_TYPE_KEY, engine.get("engineType")); // required engineType Label + labels.put( + LabelKeyConstant.USER_CREATOR_TYPE_KEY, + engine.get("executeUser") + "-IDE"); // required execute user and creator eg:hadoop-IDE + labels.put(LabelKeyConstant.CODE_TYPE_KEY, engine.get("runType")); // required codeType + Map startupMap = new HashMap(16); + // setting linkis params + // startupMap.put("wds.linkis.rm.yarnqueue", "dws"); + // 2. build jobSubmitAction + JobSubmitAction jobSubmitAction = + JobSubmitAction.builder() + .addExecuteCode(engine.get("code")) + .setStartupParams(startupMap) + .setUser(engine.get("executeUser")) // submit user + .addExecuteUser(engine.get("executeUser")) // execute user + .setLabels(labels) + .build(); + // 3. 
to execute + return client.submit(jobSubmitAction); + } + + @Scheduled(cron = "${linkis.monitor.user.db.cron:0 0/10 * * * ?}") + public void dbJob() { + Map properties = new HashMap<>(); + properties.put("readTimeout", MonitorConfig.USER_MODE_INTERFACE_TIMEOUT.getValue()); + DWSClientConfig clientConfig = HttpsUntils.createClientConfig(null, properties); + UJESClientImpl ujesClient = new UJESClientImpl(clientConfig); + GetTableStatisticInfoAction builder = + GetTableStatisticInfoAction.builder() + .setUser("hadoop") + .setDatabase("default") + .setTable("dual") + .builder(); + HashMap parms = new HashMap<>(); + try { + GetTableStatisticInfoResult tableStatisticInfo = ujesClient.getTableStatisticInfo(builder); + if (tableStatisticInfo.getStatus() != 0) { + logger.info("元数据查询服务用户态,执行失败,异常信息:" + tableStatisticInfo.getMessage()); + // parms.put("$msg", tableStatisticInfo.getMessage()); + // Map failedAlerts = + // MonitorAlertUtils.getAlerts(Constants.USER_RESOURCE_MONITOR(), parms); + // PooledImsAlertUtils.addAlert(failedAlerts.get("12017")); + } + } catch (Exception e) { + if (e instanceof SocketTimeoutException) { + Integer timeoutValue = MonitorConfig.USER_MODE_INTERFACE_TIMEOUT.getValue(); + long timeout = TimeUnit.MILLISECONDS.toSeconds(timeoutValue); + logger.info("元数据查询服务用户态,执行超时:" + timeout + "秒"); + // parms.put("$timeout", String.valueOf(timeout)); + // Map failedAlerts = + // MonitorAlertUtils.getAlerts(Constants.USER_RESOURCE_MONITOR(), parms); + // PooledImsAlertUtils.addAlert(failedAlerts.get("12018")); + } else { + logger.error("元数据查询服务用户态,执行异常:" + e); + // parms.put("$msg", e.getMessage()); + // Map failedAlerts = + // MonitorAlertUtils.getAlerts(Constants.USER_RESOURCE_MONITOR(), parms); + // PooledImsAlertUtils.addAlert(failedAlerts.get("12017")); + } + } + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/until/CacheUtils.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/until/CacheUtils.java new file mode 100644 index 0000000000..a768fde555 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/until/CacheUtils.java @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.until; + +import java.util.concurrent.TimeUnit; + +import com.google.common.cache.Cache; +import com.google.common.cache.CacheBuilder; + +public class CacheUtils { + + public static Cache cacheBuilder = + CacheBuilder.newBuilder() + .concurrencyLevel(5) + .expireAfterAccess(1, TimeUnit.DAYS) + .initialCapacity(20) + .maximumSize(1000) + .recordStats() + .build(); +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/until/HttpsUntils.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/until/HttpsUntils.java new file mode 100644 index 0000000000..a504a9d41d --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/until/HttpsUntils.java @@ -0,0 +1,173 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.until; + +import org.apache.linkis.bml.conf.BmlConfiguration; +import org.apache.linkis.common.conf.Configuration; +import org.apache.linkis.common.utils.Utils; +import org.apache.linkis.httpclient.dws.authentication.TokenAuthenticationStrategy; +import org.apache.linkis.httpclient.dws.config.DWSClientConfig; +import org.apache.linkis.httpclient.dws.config.DWSClientConfigBuilder; +import org.apache.linkis.monitor.client.MonitorHTTPClient; +import org.apache.linkis.monitor.client.MonitorHTTPClientClientImpl; +import org.apache.linkis.monitor.config.MonitorConfig; +import org.apache.linkis.monitor.entity.IndexEntity; +import org.apache.linkis.monitor.request.EmsListAction; +import org.apache.linkis.monitor.request.EntranceTaskAction; +import org.apache.linkis.monitor.response.EntranceTaskResult; +import org.apache.linkis.server.BDPJettyServerHelper; +import org.apache.linkis.ujes.client.response.EmsListResult; + +import org.apache.commons.collections.MapUtils; +import org.apache.commons.lang3.StringUtils; +import org.apache.http.client.config.RequestConfig; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.entity.ContentType; +import org.apache.http.entity.StringEntity; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; +import org.apache.http.util.EntityUtils; + +import org.springframework.util.Assert; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class HttpsUntils { + private static final Logger logger = LoggerFactory.getLogger(HttpsUntils.class); + + public static DWSClientConfig dwsClientConfig = createClientConfig(null, null); + // 
IOUtils.closeQuietly(client); + public static MonitorHTTPClient client = new MonitorHTTPClientClientImpl(dwsClientConfig); + public static final String localHost = Utils.getLocalHostname(); + + public static Map sendHttp(String url, Map properties) + throws IOException { + if (null == dwsClientConfig) { + dwsClientConfig = createClientConfig(url, properties); + } + if (null == client) { + client = new MonitorHTTPClientClientImpl(dwsClientConfig); + } + EmsListAction build = EmsListAction.newBuilder().setUser("hadoop").build(); + EmsListResult result = client.list(build); + return result.getResultMap(); + } + + public static DWSClientConfig createClientConfig(String url, Map properties) { + String realUrl = ""; + if (StringUtils.isBlank(url)) { + realUrl = Configuration.getGateWayURL(); + } else { + realUrl = url; + } + Map parms = new HashMap<>(); + if (MapUtils.isNotEmpty(properties)) { + parms = properties; + } + int maxConnection = + (int) + parms.getOrDefault( + BmlConfiguration.CONNECTION_MAX_SIZE_SHORT_NAME(), + BmlConfiguration.CONNECTION_MAX_SIZE().getValue()); + int connectTimeout = + (int) + parms.getOrDefault( + BmlConfiguration.CONNECTION_TIMEOUT_SHORT_NAME(), + BmlConfiguration.CONNECTION_TIMEOUT().getValue()); + int readTimeout = + (int) + parms.getOrDefault( + BmlConfiguration.CONNECTION_READ_TIMEOUT_SHORT_NAME(), + BmlConfiguration.CONNECTION_READ_TIMEOUT().getValue()); + String tokenKey = + (String) + parms.getOrDefault( + BmlConfiguration.AUTH_TOKEN_KEY_SHORT_NAME(), + BmlConfiguration.AUTH_TOKEN_KEY().getValue()); + String tokenValue = + (String) + parms.getOrDefault( + BmlConfiguration.AUTH_TOKEN_VALUE_SHORT_NAME(), + BmlConfiguration.AUTH_TOKEN_VALUE().getValue()); + + DWSClientConfig clientConfig = + ((DWSClientConfigBuilder) + (DWSClientConfigBuilder.newBuilder() + .addServerUrl(realUrl) + .connectionTimeout(connectTimeout) + .discoveryEnabled(false) + .discoveryFrequency(1, TimeUnit.MINUTES) + .loadbalancerEnabled(false) + .maxConnectionSize(maxConnection) + .retryEnabled(false) + .readTimeout(readTimeout) + .setAuthenticationStrategy(new TokenAuthenticationStrategy()) + .setAuthTokenKey(tokenKey) + .setAuthTokenValue(tokenValue))) + .setDWSVersion("v1") + .build(); + + return clientConfig; + } + + public static Map getEntranceTask(String url, String user, String Instance) + throws IOException { + if (null == dwsClientConfig) { + dwsClientConfig = createClientConfig(null, null); + } + if (null == client) { + client = new MonitorHTTPClientClientImpl(dwsClientConfig); + } + EntranceTaskAction build = + EntranceTaskAction.newBuilder().setUser(user).setInstance(Instance).build(); + EntranceTaskResult result = client.entranList(build); + return result.getResultMap(); + } + + public static void sendIndex(List list) throws IOException { + Map parm = new HashMap<>(); + parm.put("userAuthKey", MonitorConfig.ECM_TASK_USER_AUTHKEY.getValue()); + parm.put("metricDataList", list); + String json = BDPJettyServerHelper.gson().toJson(parm); + + RequestConfig requestConfig = RequestConfig.DEFAULT; + StringEntity entity = + new StringEntity( + json, ContentType.create(ContentType.APPLICATION_JSON.getMimeType(), "UTF-8")); + entity.setContentEncoding("UTF-8"); + + HttpPost httpPost = new HttpPost(MonitorConfig.ECM_TASK_IMURL.getValue()); + httpPost.setConfig(requestConfig); + httpPost.setEntity(entity); + + CloseableHttpClient httpClient = HttpClients.createDefault(); + CloseableHttpResponse execute = httpClient.execute(httpPost); + String responseStr = 
EntityUtils.toString(execute.getEntity(), "UTF-8"); + Map map = BDPJettyServerHelper.gson().fromJson(responseStr, Map.class); + logger.info("send index response :{}", map); + Assert.isTrue(!"0".equals(map.get("resultCode")), map.get("resultMsg")); + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/until/ThreadUtils.java b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/until/ThreadUtils.java new file mode 100644 index 0000000000..15a2626379 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/java/org/apache/linkis/monitor/until/ThreadUtils.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.until; + +import org.apache.linkis.common.utils.Utils; +import org.apache.linkis.monitor.config.MonitorConfig; +import org.apache.linkis.monitor.constants.Constants; +import org.apache.linkis.monitor.utils.alert.AlertDesc; +import org.apache.linkis.monitor.utils.alert.ims.MonitorAlertUtils; +import org.apache.linkis.monitor.utils.alert.ims.PooledImsAlertUtils; +import org.apache.linkis.monitor.utils.log.LogUtils; + +import org.springframework.context.ApplicationContext; +import org.springframework.context.event.ApplicationContextEvent; + +import java.util.*; +import java.util.concurrent.*; + +import scala.concurrent.ExecutionContextExecutorService; + +import org.slf4j.Logger; + +public class ThreadUtils extends ApplicationContextEvent { + + private static final Logger logger = LogUtils.stdOutLogger(); + + public static ExecutionContextExecutorService executors = + Utils.newCachedExecutionContext(5, "alert-pool-thread-", false); + + public ThreadUtils(ApplicationContext source) { + super(source); + } + + public static String run(List cmdList, String shellName) { + FutureTask future = new FutureTask(() -> Utils.exec(cmdList.toArray(new String[2]), -1)); + executors.submit(future); + String msg = ""; + try { + msg = future.get(MonitorConfig.SHELL_TIMEOUT.getValue(), TimeUnit.MINUTES).toString(); + } catch (TimeoutException e) { + logger.info("execute shell time out {}", shellName); + HashMap parms = new HashMap<>(); + parms.put("$shellName", shellName); + Map ecmResourceAlerts = + MonitorAlertUtils.getAlerts(Constants.THREAD_TIME_OUT_IM(), parms); + PooledImsAlertUtils.addAlert(ecmResourceAlerts.get("12014")); + } catch (ExecutionException | InterruptedException e) { + logger.error("Thread error msg {}", e.getMessage()); + } + return msg; + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/InsLabelRelationMapper.xml b/linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/InsLabelRelationMapper.xml new file mode 100644 index 0000000000..6c51f6d0aa --- /dev/null +++ 
b/linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/InsLabelRelationMapper.xml @@ -0,0 +1,67 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + l.`id`, l.`label_key`, l.`label_value`, l.`label_feature`, + l.`label_value_size`, l.`update_time`, l.`create_time` + + + + s.`id`, s.`instance`, s.`name`, s.`update_time`, s.`create_time` + + + + + + + + + \ No newline at end of file diff --git a/linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/InstanceInfoMapper.xml b/linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/InstanceInfoMapper.xml new file mode 100644 index 0000000000..d5309a5f04 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/InstanceInfoMapper.xml @@ -0,0 +1,53 @@ + + + + + + + + + + + + + + + + `id`, `instance`, `name`, `update_time`, + `create_time` + + + + DELETE FROM linkis_ps_instance_info WHERE instance = #{instance} + + + + + + + \ No newline at end of file diff --git a/linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/InstanceLabelMapper.xml b/linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/InstanceLabelMapper.xml new file mode 100644 index 0000000000..e7c7558017 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/InstanceLabelMapper.xml @@ -0,0 +1,57 @@ + + + + + + + + + + + + + + + + + + `id`, `label_key`, `label_value`, `label_feature`, + `label_value_size`, `update_time`, `create_time` + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/JobHistoryMapper.xml b/linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/JobHistoryMapper.xml new file mode 100644 index 0000000000..730e8a43da --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/resources/mapper/common/JobHistoryMapper.xml @@ -0,0 +1,172 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + `id`,`job_req_id`,`submit_user`,`execute_user`,`labels`,`params`,`status`,`error_code`,`created_time`, + `updated_time`,`instances`,`observe_info` + + + + + + + + UPDATE linkis_ps_job_history_group_history + + status = #{targetStatus} + + + + #{element} + + + + + + UPDATE linkis_ps_job_history_group_history + + status = #{targetStatus}, error_code=21304, error_desc='Automatically killed because entrance is dead' + + + created_time >= #{startDate} + AND instances = #{instanceName} + AND + + #{element} + + + LIMIT 5000 + + + + + + + + + + diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/LinkisJobHistoryScanSpringConfiguration.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/LinkisJobHistoryScanSpringConfiguration.scala new file mode 100644 index 0000000000..4154661f07 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/LinkisJobHistoryScanSpringConfiguration.scala @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor + +import org.apache.linkis.monitor.factory.MapperFactory +import org.apache.linkis.monitor.instance.dao.InstanceInfoDao +import org.apache.linkis.monitor.jobhistory.dao.JobHistoryMapper + +import org.springframework.beans.factory.annotation.Autowired +import org.springframework.context.annotation.{ComponentScan, Configuration} + +import javax.annotation.PostConstruct + +@Configuration +@ComponentScan(Array("org.apache.linkis.monitor.scan", "org.apache.linkis.mybatis")) +class LinkisJobHistoryScanSpringConfiguration { + + @Autowired + private var jobHistoryMapper: JobHistoryMapper = _ + + @Autowired + private var instanceInfoMapper: InstanceInfoDao = _ + + @PostConstruct + def init(): Unit = { + MapperFactory.setJobHistoryMapper(jobHistoryMapper) + MapperFactory.setInstanceInfoMapper(instanceInfoMapper) + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorHTTPClient.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorHTTPClient.scala new file mode 100644 index 0000000000..4caccd73a3 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorHTTPClient.scala @@ -0,0 +1,117 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.client + +import org.apache.linkis.httpclient.authentication.AuthenticationStrategy +import org.apache.linkis.httpclient.dws.authentication.StaticAuthenticationStrategy +import org.apache.linkis.httpclient.dws.config.{DWSClientConfig, DWSClientConfigBuilder} +import org.apache.linkis.httpclient.response.Result +import org.apache.linkis.monitor.request.{EmsListAction, EntranceTaskAction, MonitorResourceAction} +import org.apache.linkis.monitor.response.EntranceTaskResult +import org.apache.linkis.ujes.client.response.EmsListResult + +import java.io.Closeable +import java.util.concurrent.TimeUnit + +abstract class MonitorHTTPClient extends Closeable { + + protected[client] def executeJob(ujesJobAction: MonitorResourceAction): Result + + def list(emsListAction: EmsListAction): EmsListResult = { + executeJob(emsListAction).asInstanceOf[EmsListResult] + } + + def entranList(entranceTaskAction: EntranceTaskAction): EntranceTaskResult = { + executeJob(entranceTaskAction).asInstanceOf[EntranceTaskResult] + } + +} + +object MonitorHTTPClient { + + def apply(clientConfig: DWSClientConfig): MonitorHTTPClient = new MonitorHTTPClientClientImpl( + clientConfig + ) + + def apply(serverUrl: String): MonitorHTTPClient = apply(serverUrl, 30000, 10) + + def apply(serverUrl: String, readTimeout: Int, maxConnection: Int): MonitorHTTPClient = + apply(serverUrl, readTimeout, maxConnection, new StaticAuthenticationStrategy, "v1") + + def apply( + serverUrl: String, + readTimeout: Int, + maxConnection: Int, + authenticationStrategy: AuthenticationStrategy, + dwsVersion: String + ): MonitorHTTPClient = { + val clientConfig = DWSClientConfigBuilder + .newBuilder() + .addServerUrl(serverUrl) + .connectionTimeout(30000) + .discoveryEnabled(false) + .loadbalancerEnabled(false) + .maxConnectionSize(maxConnection) + .retryEnabled(false) + .readTimeout(readTimeout) + .setAuthenticationStrategy(authenticationStrategy) + .setDWSVersion(dwsVersion) + .build() + apply(clientConfig) + } + + def getDiscoveryClient(serverUrl: String): MonitorHTTPClient = + getDiscoveryClient(serverUrl, 30000, 10) + + def getDiscoveryClient( + serverUrl: String, + readTimeout: Int, + maxConnection: Int + ): MonitorHTTPClient = + getDiscoveryClient( + serverUrl, + readTimeout, + maxConnection, + new StaticAuthenticationStrategy, + "v1" + ) + + def getDiscoveryClient( + serverUrl: String, + readTimeout: Int, + maxConnection: Int, + authenticationStrategy: AuthenticationStrategy, + dwsVersion: String + ): MonitorHTTPClient = { + val clientConfig = DWSClientConfigBuilder + .newBuilder() + .addServerUrl(serverUrl) + .connectionTimeout(30000) + .discoveryEnabled(true) + .discoveryFrequency(1, TimeUnit.MINUTES) + .loadbalancerEnabled(true) + .maxConnectionSize(maxConnection) + .retryEnabled(false) + .readTimeout(readTimeout) + .setAuthenticationStrategy(authenticationStrategy) + .setDWSVersion(dwsVersion) + .build() + apply(clientConfig) + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorHTTPClientClientImpl.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorHTTPClientClientImpl.scala new file mode 100644 index 0000000000..5554701571 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorHTTPClientClientImpl.scala @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.client + +import org.apache.linkis.httpclient.dws.DWSHttpClient +import org.apache.linkis.httpclient.dws.config.DWSClientConfig +import org.apache.linkis.httpclient.request.Action +import org.apache.linkis.httpclient.response.Result +import org.apache.linkis.monitor.request.MonitorResourceAction + +class MonitorHTTPClientClientImpl(clientConfig: DWSClientConfig) extends MonitorHTTPClient { + + private val dwsHttpClient = + new DWSHttpClient(clientConfig, "Linkis-MonitorResource-Execution-Thread") + + override protected[client] def executeJob(ujesJobAction: MonitorResourceAction): Result = + ujesJobAction match { + + case action: Action => dwsHttpClient.execute(action) + + } + + override def close(): Unit = dwsHttpClient.close() +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorResourceClient.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorResourceClient.scala new file mode 100644 index 0000000000..d0660e1116 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorResourceClient.scala @@ -0,0 +1,112 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.client + +import org.apache.linkis.httpclient.authentication.AuthenticationStrategy +import org.apache.linkis.httpclient.dws.authentication.StaticAuthenticationStrategy +import org.apache.linkis.httpclient.dws.config.{DWSClientConfig, DWSClientConfigBuilder} +import org.apache.linkis.httpclient.response.Result +import org.apache.linkis.monitor.request.{EmsListAction, MonitorResourceAction} +import org.apache.linkis.ujes.client.response.EmsListResult + +import java.io.Closeable +import java.util.concurrent.TimeUnit + +abstract class MonitorResourceClient extends Closeable { + + protected[client] def executeJob(ujesJobAction: MonitorResourceAction): Result + + def list(jobListAction: EmsListAction): EmsListResult = { + executeJob(jobListAction).asInstanceOf[EmsListResult] + } + +} + +object MonitorResourceClient { + + def apply(clientConfig: DWSClientConfig): MonitorResourceClient = new MonitorResourceClientImpl( + clientConfig + ) + + def apply(serverUrl: String): MonitorResourceClient = apply(serverUrl, 30000, 10) + + def apply(serverUrl: String, readTimeout: Int, maxConnection: Int): MonitorResourceClient = + apply(serverUrl, readTimeout, maxConnection, new StaticAuthenticationStrategy, "v1") + + def apply( + serverUrl: String, + readTimeout: Int, + maxConnection: Int, + authenticationStrategy: AuthenticationStrategy, + dwsVersion: String + ): MonitorResourceClient = { + val clientConfig = DWSClientConfigBuilder + .newBuilder() + .addServerUrl(serverUrl) + .connectionTimeout(30000) + .discoveryEnabled(false) + .loadbalancerEnabled(false) + .maxConnectionSize(maxConnection) + .retryEnabled(false) + .readTimeout(readTimeout) + .setAuthenticationStrategy(authenticationStrategy) + .setDWSVersion(dwsVersion) + .build() + apply(clientConfig) + } + + def getDiscoveryClient(serverUrl: String): MonitorResourceClient = + getDiscoveryClient(serverUrl, 30000, 10) + + def getDiscoveryClient( + serverUrl: String, + readTimeout: Int, + maxConnection: Int + ): MonitorResourceClient = + getDiscoveryClient( + serverUrl, + readTimeout, + maxConnection, + new StaticAuthenticationStrategy, + "v1" + ) + + def getDiscoveryClient( + serverUrl: String, + readTimeout: Int, + maxConnection: Int, + authenticationStrategy: AuthenticationStrategy, + dwsVersion: String + ): MonitorResourceClient = { + val clientConfig = DWSClientConfigBuilder + .newBuilder() + .addServerUrl(serverUrl) + .connectionTimeout(30000) + .discoveryEnabled(true) + .discoveryFrequency(1, TimeUnit.MINUTES) + .loadbalancerEnabled(true) + .maxConnectionSize(maxConnection) + .retryEnabled(false) + .readTimeout(readTimeout) + .setAuthenticationStrategy(authenticationStrategy) + .setDWSVersion(dwsVersion) + .build() + apply(clientConfig) + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorResourceClientImpl.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorResourceClientImpl.scala new file mode 100644 index 0000000000..06cff3b46a --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/client/MonitorResourceClientImpl.scala @@ -0,0 +1,39 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.client + +import org.apache.linkis.httpclient.dws.DWSHttpClient +import org.apache.linkis.httpclient.dws.config.DWSClientConfig +import org.apache.linkis.httpclient.request.Action +import org.apache.linkis.httpclient.response.Result +import org.apache.linkis.monitor.request.MonitorResourceAction + +class MonitorResourceClientImpl(clientConfig: DWSClientConfig) extends MonitorResourceClient { + + private val dwsHttpClient = + new DWSHttpClient(clientConfig, "Linkis-MonitorResource-Execution-Thread") + + override protected[client] def executeJob(ujesJobAction: MonitorResourceAction): Result = + ujesJobAction match { + + case action: Action => dwsHttpClient.execute(action) + + } + + override def close(): Unit = dwsHttpClient.close() +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/constants/Constants.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/constants/Constants.scala new file mode 100644 index 0000000000..04a0438794 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/constants/Constants.scala @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.constants + +import org.apache.linkis.common.conf.CommonVars + +object Constants { + + val SCAN_PREFIX_ERRORCODE = "jobhistory.errorcode." + val SCAN_PREFIX_UNFINISHED_JOBTIME_EXCEED_SEC = "jobhistory.unfinished.time.exceed.sec." + val ALERT_RESOURCE_MONITOR = "ecm.resource.monitor.im." 
+ + val UNFINISHED_JOB_STATUS = + "Inited,WaitForRetry,Scheduled,Running".split(",").map(s => s.toUpperCase()) + + val FINISHED_JOB_STATUS = + "Succeed,Failed,Cancelled,Timeout".split(",").map(s => s.toUpperCase()) + + val DATA_FINISHED_JOB_STATUS_ARRAY = "Succeed,Failed,Cancelled,Timeout".split(",") + + val DATA_UNFINISHED_JOB_STATUS_ARRAY = + "Inited,WaitForRetry,Scheduled,Running".split(",") + + val ALERT_PROPS_FILE_PATH = CommonVars.properties.getProperty( + "linkis.alert.conf.file.path", + "linkis-et-monitor-file.properties" + ) + + val ALERT_IMS_URL = CommonVars.properties.getProperty( + "linkis.alert.url", + "http://127.0.0.1:10812/ims_data_access/send_alarm.do" + ) + + val ALERT_SUB_SYSTEM_ID = + CommonVars.properties.getProperty("linkis.alert.sub_system_id", "10001") + + val ALERT_DEFAULT_RECEIVERS = CommonVars.properties + .getProperty("linkis.alert.receiver.default", "") + .split(",") + .toSet[String] + + val ALERT_IMS_MAX_LINES = CommonVars[Int]("linkis.alert.content.max.lines", 8).getValue + + val TIMEOUT_INTERVALS_SECONDS = + CommonVars[Long]("linkis.monitor.scanner.timeout.interval.seconds", 1 * 60 * 60).getValue + + val ERRORCODE_MAX_INTERVALS_SECONDS = + CommonVars[Long]("linkis.errorcode.scanner.max.interval.seconds", 1 * 60 * 60).getValue + + val SCAN_RULE_UNFINISHED_JOB_STATUS = + "Inited,WaitForRetry,Scheduled,Running".split(",").map(s => s.toUpperCase()) + + val USER_LABEL_MONITOR = "jobhistory.label.monitor.im." + + val USER_LABEL_TENANT: CommonVars[String] = + CommonVars[String]("linkis.monitor.jobhistory.userLabel.tenant", "{}") + + val USER_RESOURCE_MONITOR = "user.mode.monitor.im." + val BML_CLEAR_IM = "bml.clear.monitor.im." + val THREAD_TIME_OUT_IM = "thread.monitor.timeout.im." + val JOB_RESULT_IM = "jobhistory.result.monitor.im." + + val BML_VERSION_MAX_NUM: CommonVars[Int] = + CommonVars[Int]("linkis.monitor.bml.cleaner.version.max.num", 50) + + val BML_VERSION_KEEP_NUM: CommonVars[Int] = + CommonVars[Int]("linkis.monitor.bml.cleaner.version.keep.num", 20) + + val BML_PREVIOUS_INTERVAL_TIME_DAYS: CommonVars[Long] = + CommonVars[Long]("linkis.monitor.bml.cleaner.previous.interval.days", 30) + + val BML_CLEAN_ONCE_RESOURCE_LIMIT_NUM: CommonVars[Int] = + CommonVars[Int]("linkis.monitor.bml.cleaner.once.limit.num", 100) + + val BML_TRASH_PATH_PREFIX: CommonVars[String] = + CommonVars[String]("linkis.monitor.bml.trash.prefix.path", "hdfs:///tmp/linkis/trash/bml_trash") + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/constants/ScanOperatorEnum.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/constants/ScanOperatorEnum.scala new file mode 100644 index 0000000000..02fafa56d6 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/constants/ScanOperatorEnum.scala @@ -0,0 +1,23 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
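Review note: a small sketch (not part of this patch) of how the CommonVars-backed constants above resolve at runtime; the override value mentioned in the comment is illustrative and would normally be set in linkis.properties.

```scala
// Hedged sketch: CommonVars falls back to the defaults declared in Constants
// when the key is absent from the Linkis configuration.
import org.apache.linkis.monitor.constants.Constants

object ConstantsExample {
  def main(args: Array[String]): Unit = {
    // e.g. "linkis.monitor.bml.cleaner.version.max.num=100" in linkis.properties would yield 100
    val maxVersions: Int = Constants.BML_VERSION_MAX_NUM.getValue
    val keepVersions: Int = Constants.BML_VERSION_KEEP_NUM.getValue
    println(s"BML cleaner keeps $keepVersions of at most $maxVersions versions")
  }
}
```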
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.constants + +object ScanOperatorEnum extends Enumeration { + type ScanOperatorEnum = Value + val BML_VERSION, JOB_HISTORY = Value +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/ob/Event.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/ob/Event.scala new file mode 100644 index 0000000000..bf0508fc38 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/ob/Event.scala @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.core.ob + +trait Event { + def isRegistered: Boolean + + def register(observer: Observer): Unit + + def unRegister(observer: Observer): Unit + + def notifyObserver(event: Event, message: Any): Unit +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/ob/Observer.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/ob/Observer.scala new file mode 100644 index 0000000000..aecc9f7fd4 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/ob/Observer.scala @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.core.ob + +trait Observer { + + /** + * Observer Pattern + */ + def update(event: Event, msg: Any): Unit +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/ob/SingleObserverEvent.java b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/ob/SingleObserverEvent.java new file mode 100644 index 0000000000..0414266668 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/ob/SingleObserverEvent.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.core.ob; + + +public class SingleObserverEvent implements Event { + private Observer observer; + + @Override + public boolean isRegistered() { + return observer != null; + } + + @Override + public void register(Observer observer) { + this.observer = observer; + } + + @Override + public void unRegister(Observer observer) { + this.observer = null; + } + + @Override + public void notifyObserver(Event event, Object message) { + observer.update(event, message); + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/AbstractDataFetcher.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/AbstractDataFetcher.scala new file mode 100644 index 0000000000..21a75986a1 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/AbstractDataFetcher.scala @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
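Review note: the observer wiring these types define, shown as a minimal sketch. `LoggingObserver` is a hypothetical observer used only for illustration; in this patch the concrete observers are the alert senders.

```scala
// Hedged sketch: SingleObserverEvent holds exactly one Observer and
// notifyObserver() forwards the message to that observer's update().
import org.apache.linkis.monitor.core.ob.{Event, Observer, SingleObserverEvent}

class LoggingObserver extends Observer {
  override def update(event: Event, msg: Any): Unit =
    println(s"event ${event.getClass.getSimpleName} fired with: $msg")
}

object ObserverExample {
  def main(args: Array[String]): Unit = {
    val event = new SingleObserverEvent
    event.register(new LoggingObserver) // isRegistered becomes true
    if (event.isRegistered) {
      event.notifyObserver(event, "3 jobs matched")
    }
  }
}
```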
+ */ + +package org.apache.linkis.monitor.core.pac + +abstract class AbstractDataFetcher(customName: String = "") extends DataFetcher { + + private val name: String = if (!customName.isEmpty) { + customName + } else { + this.getClass.getName + "@" + Integer.toHexString(this.hashCode) + } + + def getName(): String = this.name +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/AbstractScanRule.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/AbstractScanRule.scala new file mode 100644 index 0000000000..eedf532238 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/AbstractScanRule.scala @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.core.pac + +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.monitor.core.ob.{Event, Observer} +import org.apache.linkis.monitor.core.ob.Observer + +abstract class AbstractScanRule(customName: String = "", event: Event, observer: Observer) + extends ScanRule + with Logging { + event.register(observer) + + private val name: String = if (!customName.isEmpty) { + customName + } else { + this.getClass.getName + "@" + Integer.toHexString(this.hashCode) + } + + def getName(): String = this.name + + /** + * register an observer to trigger if this rule is matched + * + * @param observer + */ + override def addObserver(observer: Observer): Unit = event.register(observer) + + /** + * return registered event + * + * @return + */ + override def getHitEvent(): Event = event + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/BaseScannedData.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/BaseScannedData.scala new file mode 100644 index 0000000000..3597eb78d0 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/BaseScannedData.scala @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
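Review note: a minimal custom rule built on `AbstractScanRule`, illustrative only; it mirrors the shape of the concrete rules added later in this patch (`triggerIfMatched` comes from the `ScanRule` trait, and the constructor registers the observer on the hit event).

```scala
// Hedged sketch: fire the hit event whenever any scanned data arrives.
import org.apache.linkis.monitor.core.ob.{Event, Observer}
import org.apache.linkis.monitor.core.pac.{AbstractScanRule, ScannedData}

import java.util

class NonEmptyDataRule(event: Event, observer: Observer)
    extends AbstractScanRule(event = event, observer = observer) {

  override def triggerIfMatched(data: util.List[ScannedData]): Boolean = {
    if (!getHitEvent().isRegistered || data == null || data.isEmpty) return false
    getHitEvent().notifyObserver(getHitEvent(), data) // hand the matched data to the observer
    true
  }

}
```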
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.core.pac + +import java.util + +class BaseScannedData(owner: String, data: util.List[scala.Any]) extends ScannedData { + override def getOwner(): String = this.owner + + override def getData(): util.List[scala.Any] = this.data +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/DataFetcher.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/DataFetcher.scala new file mode 100644 index 0000000000..3b86ce6c3d --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/DataFetcher.scala @@ -0,0 +1,40 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.core.pac + +import java.util + +/** + * ScanOperator should encapsulate lower-level client for accessing data from an arbitrary + * datasource. e.g. if we want to scan a DB table. Then operator should encapsulate a DAO + */ +trait DataFetcher { + def getName(): String + + /** + * get arguments for querying data + * + * @return + */ + def getArgs(): Array[scala.Any] + + /** + * make a query to mysql/hive etc. given args + */ + def getData(): util.List[scala.Any] +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/ScanBuffer.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/ScanBuffer.scala new file mode 100644 index 0000000000..8518738606 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/ScanBuffer.scala @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
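Review note: a trivial in-memory `DataFetcher` sketch, illustrative only; the real fetcher in this patch, `JobHistoryDataFetcher`, encapsulates a JobHistory DAO instead, as the scaladoc above suggests.

```scala
// Hedged sketch: a DataFetcher that serves a fixed data set.
import org.apache.linkis.monitor.core.pac.AbstractDataFetcher

import java.util

class StaticDataFetcher(rows: Seq[Any]) extends AbstractDataFetcher("static-fetcher") {

  // no query arguments are needed for a fixed data set
  override def getArgs(): Array[scala.Any] = Array.empty[Any]

  override def getData(): util.List[scala.Any] = {
    val list = new util.ArrayList[scala.Any]()
    rows.foreach(list.add)
    list
  }

}
```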
+ */ + +package org.apache.linkis.monitor.core.pac + +import java.util +import java.util.concurrent.LinkedBlockingDeque + +class ScanBuffer { + val buffer: LinkedBlockingDeque[ScannedData] = new LinkedBlockingDeque[ScannedData] + + def write(data: ScannedData): Unit = buffer.add(data) + + def write(data: util.List[ScannedData]): Unit = buffer.addAll(data) + + def drain(maxSize: Int = -1): util.List[ScannedData] = { + val ret = new util.LinkedList[ScannedData] + val realSize = if (maxSize < 0) { + buffer.size + } else { + maxSize + } + buffer.drainTo(ret, realSize) + return ret + } + + def size(): Int = buffer.size() +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/ScanRule.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/ScanRule.scala new file mode 100644 index 0000000000..fa599c4a68 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/ScanRule.scala @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.core.pac + +import org.apache.linkis.monitor.core.ob.{Event, Observer} +import org.apache.linkis.monitor.core.ob.Observer + +import java.util + +trait ScanRule { + + def getName(): String + + /** + * register an observer to trigger if this rule is matched + * + * @param observer + */ + def addObserver(observer: Observer): Unit + + /** + * return registered event + * + * @return + */ + def getHitEvent(): Event + + /** + * if data match the pattern, return true and trigger observer should call isMatched() + * + * @param data + * @return + */ + def triggerIfMatched(data: util.List[ScannedData]): Boolean +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/ScannedData.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/ScannedData.scala new file mode 100644 index 0000000000..2c4c1e0ded --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/pac/ScannedData.scala @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
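Review note: `ScanBuffer` usage in isolation, a minimal sketch. The buffer is the hand-off queue between the fetch and analyze phases; writes are thread-safe via the backing `LinkedBlockingDeque`, and `drain()` empties it (or up to `maxSize` entries).

```scala
// Hedged sketch: write one batch of scanned data, then drain it.
import org.apache.linkis.monitor.core.pac.{BaseScannedData, ScanBuffer}

import java.util

object ScanBufferExample {
  def main(args: Array[String]): Unit = {
    val buffer = new ScanBuffer
    val rows = new util.ArrayList[scala.Any]()
    rows.add("job-1")
    rows.add("job-2")

    buffer.write(new BaseScannedData("hadoop", rows))
    println(s"buffered batches: ${buffer.size()}") // 1

    val drained = buffer.drain() // default maxSize = -1 drains everything
    println(s"drained ${drained.size()} batch(es), buffer now ${buffer.size()}")
  }
}
```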
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.core.pac + +import java.util + +trait ScannedData { + def getOwner(): String + + def getData(): util.List[scala.Any] +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/scanner/AbstractScanner.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/scanner/AbstractScanner.scala new file mode 100644 index 0000000000..4f207cd697 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/scanner/AbstractScanner.scala @@ -0,0 +1,164 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.core.scanner + +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.monitor.core.ob.{Event, Observer} +import org.apache.linkis.monitor.core.pac._ +import org.apache.linkis.monitor.jobhistory.exception.AnomalyScannerException + +import java.util +import java.util.concurrent.CopyOnWriteArrayList +import java.util.concurrent.atomic.AtomicInteger + +abstract class AbstractScanner extends AnomalyScanner with Logging { + private val buffer: ScanBuffer = new ScanBuffer + + private val dataFetcherIdx: AtomicInteger = + new AtomicInteger(0) // mark next fetcher for sequentially produce data + + private val dataFetcherList: CopyOnWriteArrayList[DataFetcher] = + new CopyOnWriteArrayList[DataFetcher] + + private val scanRuleList: CopyOnWriteArrayList[ScanRule] = new CopyOnWriteArrayList[ScanRule] + + /** + * Producer + */ + override def addDataFetcher(fetcher: DataFetcher): Unit = { + if (fetcher != null) { + dataFetcherList.add(fetcher) + } else { + logger.warn("ignore null DataFetcher") + } + } + + override def addDataFetchers(fetchers: util.List[DataFetcher]): Unit = { + if (fetchers != null && fetchers.size != 0) { + dataFetcherList.addAll(fetchers) + } else { + logger.warn("ignore null or empty DataFetcher") + } + } + + override def getDataFetchers: util.List[DataFetcher] = dataFetcherList + + /** + * directly feed data to buffer + */ + override def feedData(data: util.List[ScannedData]): Unit = { + if (data != null && data.size != 0) { + buffer.write(data) + } else { + logger.warn("Fed with null or empty data") + } + } + + /** + * Returns a buffer that allows read/write simultaneously buffer is allowed to be written by other + * thread + */ + override def getBuffer(): ScanBuffer = buffer + + /** + * add rules to scanner + */ + override def addScanRule(rule: ScanRule): Unit = { + if (rule != null) { + scanRuleList.add(rule) + } else { + logger.warn("ignore null ScanRule") + } + } + + override def addScanRules(rules: util.List[ScanRule]): Unit = { + if (rules != null && rules.size != 0) { + scanRuleList.addAll(rules) + } else { + logger.warn("ignore null or empty ScanRule") + } + } + + override def getScanRules(): util.List[ScanRule] = scanRuleList + + /** + * blocking call, scan and analyze until all dataFetchers are accessed once + */ + override def run(): Unit = { + if (dataFetcherList.size() == 0) { + throw new AnomalyScannerException(21304, "attempting to run scanner with empty dataFetchers") + } + if (buffer == null) { + throw new AnomalyScannerException(21304, "attempting to run scanner with null buffer") + } + if (scanRuleList.size == 0) { + throw new AnomalyScannerException(21304, "attempting to run scanner with empty rules") + } + while (dataFetcherIdx.get() < dataFetcherList.size()) { + scanOneIteration() + analyzeOneIteration() + } + } + + /** + * 1. scan data for 1 iteration 2. should be a blocking call 3. see if [[ScanRule]] is matched + * 4. trigger [[Event]] and inform observer + */ + override def scanOneIteration(): Unit = { + val idx = dataFetcherIdx.getAndIncrement() + val fetcher = dataFetcherList.get(idx) + if (fetcher != null) { + val rawData = fetcher.getData() + logger.info("scanned " + rawData.size + " data. Rule: " + fetcher.getName); + if (rawData != null && rawData.size != 0) { + buffer.write(new BaseScannedData(fetcher.getName, rawData)) + } + } else { + logger.warn("ignored null fetcher!!") + } + } + + /** + * 1. should be a blocking call 2. read from [[ScanBuffer]] 2. see if [[ScanRule]] is matched 3. 
+ * trigger [[Observer]] + */ + override def analyzeOneIteration(): Unit = { + val dataToAnalyze = buffer.drain() + if (dataToAnalyze != null && dataToAnalyze.size() != 0) { + val len = scanRuleList.size() + for (i <- 0 until len) { + val scanRule = scanRuleList.get(i) + if (scanRule != null) { + logger.info("analyzing " + dataToAnalyze.size + " data. Rule: " + scanRule.getName) + scanRule.triggerIfMatched(dataToAnalyze) + } else { + logger.warn("found empty or null ScanRule") + } + } + } else { + logger.info("analyzed 0 data.") + } + } + + /** + * 1. should be non-blocking 2. keeps calling scanOneIteration() and analyzeOneIteration() utils + * stop() is called + */ + override def start(): Unit = {} + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/scanner/AnomalyScanner.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/scanner/AnomalyScanner.scala new file mode 100644 index 0000000000..7fa84d3879 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/scanner/AnomalyScanner.scala @@ -0,0 +1,95 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.core.scanner + +import org.apache.linkis.monitor.core.ob.Event +import org.apache.linkis.monitor.core.pac.{DataFetcher, ScanBuffer, ScannedData, ScanRule} +import org.apache.linkis.monitor.core.pac.DataFetcher + +import java.util + +/** + * A Scanner that: + * 1. scan a datasource using [[DataFetcher]], write data into [[ScanBuffer]] 2. read data from + * [[ScanBuffer]] see if [[ScanRule]] is matched 3. trigger [[Event]] in [[ScanRule]] and + * inform observer + */ +trait AnomalyScanner { + + /** + * Producer + */ + def addDataFetcher(dataFetcher: DataFetcher): Unit + + def addDataFetchers(dataFetchers: util.List[DataFetcher]): Unit + + def getDataFetchers: util.List[DataFetcher] + + /** + * directly feed data to buffer + */ + def feedData(data: util.List[ScannedData]): Unit + + /** + * Buffer + */ + + /** + * add rules to scanner + */ + def addScanRule(rule: ScanRule): Unit + + def addScanRules(rules: util.List[ScanRule]): Unit + + /** + * Consumer + */ + + def getScanRules(): util.List[ScanRule] + + /** + * scan and analyze for 1 iteration + */ + def run(): Unit + + /** + * 1. should be non-blocking 2. keeps calling scan() utils stop() is called + */ + def start(): Unit + + def shutdown(): Unit + + /** + * 1. should be a blocking call 2. call [[DataFetcher]] to read data 3. write result to + * [[ScanBuffer]] + */ + protected def scanOneIteration(): Unit + + /** + * Returns a buffer that allows read/write simultaneously buffer is allowed to be written by other + * thread + */ + protected def getBuffer(): ScanBuffer + + /** + * 1. 
should be a blocking call 2. read from [[ScanBuffer]] 2. see if [[ScanRule]] is matched 3. + * trigger [[[[org.apache.linkis.tools.core.ob.Observer]]]] + */ + protected def analyzeOneIteration(): Unit + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/scanner/DefaultScanner.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/scanner/DefaultScanner.scala new file mode 100644 index 0000000000..80ab7a5498 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/core/scanner/DefaultScanner.scala @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.core.scanner + +import org.apache.linkis.monitor.utils.alert.ims.PooledImsAlertUtils + +class DefaultScanner extends AbstractScanner { + + override def shutdown(): Unit = { + PooledImsAlertUtils.shutDown(true, -1) + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/factory/MapperFactory.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/factory/MapperFactory.scala new file mode 100644 index 0000000000..eb503c52aa --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/factory/MapperFactory.scala @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
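Review note: an end-to-end wiring sketch using the hypothetical helpers from the earlier notes (`StaticDataFetcher`, `NonEmptyDataRule`, `LoggingObserver`); none of those names exist in this patch, they only illustrate how `DefaultScanner`, fetchers and rules fit together.

```scala
// Hedged sketch: one fetcher, one rule, one blocking pass over the data.
import org.apache.linkis.monitor.core.ob.SingleObserverEvent
import org.apache.linkis.monitor.core.scanner.DefaultScanner

object ScannerWiringExample {
  def main(args: Array[String]): Unit = {
    val scanner = new DefaultScanner

    val hitEvent = new SingleObserverEvent
    scanner.addDataFetcher(new StaticDataFetcher(Seq("job-1", "job-2")))
    // the AbstractScanRule constructor registers the observer on hitEvent
    scanner.addScanRule(new NonEmptyDataRule(hitEvent, new LoggingObserver))

    scanner.run()      // blocking: fetch once per registered fetcher, then analyze
    scanner.shutdown() // DefaultScanner shuts down the pooled IMS alert threads
  }
}
```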
+ */ + +package org.apache.linkis.monitor.factory + +import org.apache.linkis.monitor.instance.dao.{ + InsLabelRelationDao, + InstanceInfoDao, + InstanceLabelDao +} +import org.apache.linkis.monitor.jobhistory.dao.JobHistoryMapper + +object MapperFactory { + + private var jobHistoryMapper: JobHistoryMapper = _ + + private var instanceInfoMapper: InstanceInfoDao = _ + + private var instanceLabelMapper: InstanceLabelDao = _ + + private var instanceLabelRelationMapper: InsLabelRelationDao = _ + + def getJobHistoryMapper(): JobHistoryMapper = jobHistoryMapper + + def setJobHistoryMapper(jobHistoryMapper: JobHistoryMapper): Unit = { + MapperFactory.jobHistoryMapper = jobHistoryMapper + } + + def getInstanceInfoMapper(): InstanceInfoDao = instanceInfoMapper + + def setInstanceInfoMapper(instanceInfoMapper: InstanceInfoDao): Unit = { + MapperFactory.instanceInfoMapper = instanceInfoMapper + } + + def getInstanceLabelMapper(): InstanceLabelDao = instanceLabelMapper + + def setInstanceLabelMapper(instanceLabelMapper: InstanceLabelDao): Unit = { + MapperFactory.instanceLabelMapper = instanceLabelMapper + } + + def getInsLabelRelationMapper(): InsLabelRelationDao = instanceLabelRelationMapper + + def setInsLabelRelationMapper(instanceLabelRelationMapper: InsLabelRelationDao): Unit = { + MapperFactory.instanceLabelRelationMapper = instanceLabelRelationMapper + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/JobHistoryDataFetcher.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/JobHistoryDataFetcher.scala new file mode 100644 index 0000000000..fb371a658d --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/JobHistoryDataFetcher.scala @@ -0,0 +1,117 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.jobhistory + +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.monitor.constants.Constants +import org.apache.linkis.monitor.core.pac.AbstractDataFetcher +import org.apache.linkis.monitor.jobhistory.dao.JobHistoryMapper +import org.apache.linkis.monitor.jobhistory.exception.AnomalyScannerException + +import org.apache.commons.lang3.StringUtils + +import java.util +import java.util.Date + +class JobHistoryDataFetcher(args: Array[Any], mapper: JobHistoryMapper) + extends AbstractDataFetcher + with Logging { + + /** + * retrieve JobHistory Data starts from startTimeMs and ends at startTimeMs + intervalsMs + * + * @return + */ + /** + * get arguments for querying data + * + * @return + */ + override def getArgs(): Array[Any] = args + + /** + * 1. 
get Data given some arguments + */ + override def getData(): util.List[scala.Any] = { + if (!args.isInstanceOf[Array[String]]) { + throw new AnomalyScannerException( + 21304, + "Wrong input for JobHistoryDataFetcher. DataType: " + args.getClass.getCanonicalName + ) + } + if (args != null && args.length == 2) { + val start = Utils.tryCatch(args(0).asInstanceOf[String].toLong) { t => + { + logger.error("Failed to get data from DB: Illegal arguments.", t) + throw t + } + } + val end = Utils.tryCatch(args(1).asInstanceOf[String].toLong) { t => + { + logger.error("Failed to get data from DB: Illegal arguments.", t) + throw t + } + } + mapper + .search(null, null, null, new Date(start), new Date(end), null) + .asInstanceOf[util.List[scala.Any]] + } else if (args != null && args.length == 4) { + val start = Utils.tryCatch(args(0).asInstanceOf[String].toLong) { t => + { + logger.error("Failed to get data from DB: Illegal arguments.", t) + throw t + } + } + val end = Utils.tryCatch(args(1).asInstanceOf[String].toLong) { t => + { + logger.error("Failed to get data from DB: Illegal arguments.", t) + throw t + } + } + val id = Utils.tryCatch(args(2).asInstanceOf[String].toLong) { t => + { + logger.error("Failed to get data from DB: Illegal arguments.", t) + throw t + } + } + if ( + StringUtils.isNotBlank(args(3).asInstanceOf[String]) && args(3) + .asInstanceOf[String] + .equals("updated_time") + ) { + val list = new util.ArrayList[String]() + Constants.DATA_FINISHED_JOB_STATUS_ARRAY.foreach(list.add) + mapper + .searchByCacheAndUpdateTime(id, null, list, new Date(start), new Date(end), null) + .asInstanceOf[util.List[scala.Any]] + } else { + val list = new util.ArrayList[String]() + Constants.DATA_UNFINISHED_JOB_STATUS_ARRAY.foreach(list.add) + mapper + .searchByCache(id, null, list, new Date(start), new Date(end), null) + .asInstanceOf[util.List[scala.Any]] + } + } else { + throw new AnomalyScannerException( + 21304, + "Wrong input for JobHistoryDataFetcher. Data: " + args + ) + } + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/errorcode/JobHistoryErrCodeHitEvent.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/errorcode/JobHistoryErrCodeHitEvent.scala new file mode 100644 index 0000000000..4e36d44d86 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/errorcode/JobHistoryErrCodeHitEvent.scala @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
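Review note: constructing the fetcher above. Its arguments must arrive as a `String[]` at runtime (the `getData` type check is `isInstanceOf[Array[String]]`), so the sketch builds an `Array[String]` and up-casts it. The 12-hour window is illustrative, and it assumes `MapperFactory.setJobHistoryMapper(...)` has already been called during application startup.

```scala
// Hedged sketch: a two-argument fetcher over a (startMs, endMs) window.
import org.apache.linkis.monitor.factory.MapperFactory
import org.apache.linkis.monitor.jobhistory.JobHistoryDataFetcher

object JobHistoryFetcherExample {
  def twelveHourWindowFetcher(): JobHistoryDataFetcher = {
    val endMs = System.currentTimeMillis()
    val startMs = endMs - 12 * 60 * 60 * 1000L
    // keep the runtime type String[] so the fetcher's Array[String] check passes
    val args = Array(startMs.toString, endMs.toString).asInstanceOf[Array[Any]]
    new JobHistoryDataFetcher(args, MapperFactory.getJobHistoryMapper())
  }
}
```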
+ */ + +package org.apache.linkis.monitor.jobhistory.errorcode + +import org.apache.linkis.monitor.core.ob.SingleObserverEvent + +class JobHistoryErrCodeHitEvent extends SingleObserverEvent diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/errorcode/JobHistoryErrCodeRule.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/errorcode/JobHistoryErrCodeRule.scala new file mode 100644 index 0000000000..d354f7f7b0 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/errorcode/JobHistoryErrCodeRule.scala @@ -0,0 +1,84 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.jobhistory.errorcode + +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.monitor.core.ob.Observer +import org.apache.linkis.monitor.core.pac.{AbstractScanRule, ScannedData} +import org.apache.linkis.monitor.jobhistory.entity.JobHistory +import org.apache.linkis.monitor.until.CacheUtils + +import java.util + +import scala.collection.JavaConverters._ + +/** + * Monitor the error codes returned by executing tasks. When executing script tasks, the executed + * error codes will be recorded in the database. The service will generate an alarm based on the + * error code recorded in the database. If the error code contains (11001, 11002), the alarm will be + * triggered. + */ +class JobHistoryErrCodeRule(errorCodes: util.Set[String], hitObserver: Observer) + extends AbstractScanRule(event = new JobHistoryErrCodeHitEvent, observer = hitObserver) + with Logging { + private val scanRuleList = CacheUtils.cacheBuilder + + /** + * if data match the pattern, return true and trigger observer should call isMatched() + * + * @param data + * @return + */ + override def triggerIfMatched(data: util.List[ScannedData]): Boolean = { + + if (!getHitEvent().isRegistered || null == data) { + logger.error("ScanRule is not bind with an observer. 
Will not be triggered") + return false + } + + val alertData: util.List[JobHistory] = new util.ArrayList[JobHistory]() + for (sd <- data.asScala) { + if (sd != null && sd.getData() != null) { + for (d <- sd.getData().asScala) { + d match { + case history: JobHistory => + if (errorCodes.contains(String.valueOf(history.getErrorCode))) { + alertData.add(history) + } + scanRuleList.put("jobHistoryId", history.getId) + case _ => + logger.warn( + "Ignored wrong input data Type : " + d + ", " + d.getClass.getCanonicalName + ) + } + } + } else { + logger.warn("Ignored null scanned data") + } + + } + logger.info("hit " + alertData.size() + " data in one iteration") + if (alertData.size() > 0) { + getHitEvent().notifyObserver(getHitEvent(), alertData) + true + } else { + false + } + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/errorcode/JobHistoryErrorCodeAlertSender.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/errorcode/JobHistoryErrorCodeAlertSender.scala new file mode 100644 index 0000000000..7f3d8e10cb --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/errorcode/JobHistoryErrorCodeAlertSender.scala @@ -0,0 +1,96 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.jobhistory.errorcode + +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.monitor.core.ob.{Event, Observer} +import org.apache.linkis.monitor.jobhistory.entity.JobHistory +import org.apache.linkis.monitor.jobhistory.exception.AnomalyScannerException +import org.apache.linkis.monitor.utils.alert.AlertDesc +import org.apache.linkis.monitor.utils.alert.ims.{ImsAlertDesc, PooledImsAlertUtils} + +import java.util + +import scala.collection.JavaConverters._ + +class JobHistoryErrorCodeAlertSender(alerts: util.Map[String, AlertDesc]) + extends Observer + with Logging { + + override def update(e: Event, jobHistoryList: scala.Any): Unit = { + if (!e.isInstanceOf[JobHistoryErrCodeHitEvent]) { + throw new AnomalyScannerException( + 21304, + "Wrong event that triggers JobHistoryErrorCodeAlertSender. Input DataType: " + e.getClass.getCanonicalName + ) + } + if (null == jobHistoryList || !jobHistoryList.isInstanceOf[util.List[_]]) { + throw new AnomalyScannerException( + 21304, + "Wrong input for JobHistoryErrorCodeAlertSender. 
Input DataType: " + jobHistoryList.getClass.getCanonicalName + ) + } + val toSend = new util.HashMap[String, ImsAlertDesc] + for (a <- jobHistoryList.asInstanceOf[util.List[_]].asScala) { + if (a == null) { + logger.warn("Ignore null input data") + } else if (!a.isInstanceOf[JobHistory]) { + logger.warn("Ignore wrong input data Type : " + a.getClass.getCanonicalName) + } else { + val jobHistory = a.asInstanceOf[JobHistory] + val errorCode = String.valueOf(jobHistory.getErrorCode) + if (alerts.containsKey(errorCode) && alerts.get(errorCode).isInstanceOf[ImsAlertDesc]) { + val alert = if (!toSend.containsKey(errorCode)) { + alerts.get(errorCode).asInstanceOf[ImsAlertDesc] + } else { + toSend.get(errorCode) + } + + var newInfo = if (!toSend.containsKey(errorCode)) { + alert.alertInfo + "\n" + + "[error_code] " + jobHistory.getErrorCode + ", " + jobHistory.getErrorDesc + "\n" + } else { + alert.alertInfo + } + newInfo = newInfo + + "[job-info] " + + "submit-user: " + jobHistory.getSubmitUser + ", " + + "execute-user: " + jobHistory.getExecuteUser + ", " + + "engine_type: " + jobHistory.getEngineType + ", " + + "create_time: " + jobHistory.getCreatedTime + ", " + + "instance: " + jobHistory.getInstances + ". \n" + val newNumHit = alert.numHit + 1 + toSend.put(errorCode, alert.copy(alertInfo = newInfo, numHit = newNumHit)) + } else if (!alerts.containsKey(errorCode)) { + logger.warn("Ignored unregistered error code: " + errorCode) + } else if (!alerts.get(errorCode).isInstanceOf[ImsAlertDesc]) { + logger.warn( + "Ignored invalid alertDesc. DataType: " + alerts + .get(errorCode) + .getClass + .getCanonicalName + ) + } + } + } + for ((_, alert) <- toSend.asScala) { + PooledImsAlertUtils.addAlert(alert) + } + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/jobtime/JobTimeExceedAlertSender.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/jobtime/JobTimeExceedAlertSender.scala new file mode 100644 index 0000000000..0a53142eb7 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/jobtime/JobTimeExceedAlertSender.scala @@ -0,0 +1,111 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
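Review note: a wiring sketch pairing `JobHistoryErrCodeRule` with `JobHistoryErrorCodeAlertSender`. The alerts map (error code to `ImsAlertDesc`) is assumed to be loaded elsewhere from the `linkis-et-monitor-file.properties` alert definitions; how it is loaded is not shown here.

```scala
// Hedged sketch: alert on exactly the error codes that have an alert description configured.
import org.apache.linkis.monitor.core.pac.ScanRule
import org.apache.linkis.monitor.jobhistory.errorcode.{
  JobHistoryErrCodeRule,
  JobHistoryErrorCodeAlertSender
}
import org.apache.linkis.monitor.utils.alert.AlertDesc

import java.util

object ErrorCodeRuleExample {
  def buildRule(alerts: util.Map[String, AlertDesc]): ScanRule =
    new JobHistoryErrCodeRule(alerts.keySet(), new JobHistoryErrorCodeAlertSender(alerts))
}
```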
+ */ + +package org.apache.linkis.monitor.jobhistory.jobtime + +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.monitor.config.MonitorConfig +import org.apache.linkis.monitor.core.ob.{Event, Observer} +import org.apache.linkis.monitor.jobhistory.entity.JobHistory +import org.apache.linkis.monitor.jobhistory.exception.AnomalyScannerException +import org.apache.linkis.monitor.utils.alert.AlertDesc +import org.apache.linkis.monitor.utils.alert.ims.{ImsAlertDesc, PooledImsAlertUtils} + +import java.text.MessageFormat +import java.util + +import scala.collection.JavaConverters._ +import scala.collection.mutable.ArrayBuffer + +class JobTimeExceedAlertSender(alerts: util.Map[String, AlertDesc]) extends Observer with Logging { + + private val orderedThresholds: Array[Long] = { + val ret = new ArrayBuffer[Long]() + if (alerts != null) { + for (k <- alerts.keySet().asScala) { + Utils.tryCatch(ret.append(k.toLong)) { t => + logger.warn("Ignored illegal threshold: " + k, t) + false + } + } + } + ret.toArray + } + + override def update(e: Event, jobHistoryList: scala.Any): Unit = { + if (!e.isInstanceOf[JobTimeExceedHitEvent]) { + throw new AnomalyScannerException( + 21304, + "Wrong event that triggers JobTimeExceedAlertSender. Input DataType: " + e.getClass.getCanonicalName + ) + } + if (null == jobHistoryList || !jobHistoryList.isInstanceOf[util.List[_]]) { + throw new AnomalyScannerException( + 21304, + "Wrong input for JobTimeExceedAlertSender. Input DataType: " + jobHistoryList.getClass.getCanonicalName + ) + } + if (orderedThresholds.length == 0) { + logger.warn("Found none legal threshold, will not send any alert: " + this) + return + } + val toSend = new util.HashMap[String, ImsAlertDesc] + for (a <- jobHistoryList.asInstanceOf[util.List[_]].asScala) { + if (a == null) { + logger.warn("Ignore null input data") + } else if (!a.isInstanceOf[JobHistory]) { + logger.warn("Ignore wrong input data Type : " + a.getClass.getCanonicalName) + } else { + val jobHistory = a.asInstanceOf[JobHistory] + val elapse = System.currentTimeMillis() - jobHistory.getCreatedTime.getTime + var ts = 0L + for (t <- orderedThresholds) { // search max threshold that is smaller than elapse + if (elapse >= t) { + ts = t + } else {} + } + val name = ts.toString + val alert = if (!toSend.containsKey(name)) { + alerts + .get(name) + .asInstanceOf[ImsAlertDesc] + } else { + toSend.get(name) + } + + val newInfo = MessageFormat.format( + MonitorConfig.TASK_RUNTIME_TIMEOUT_DESC.getValue, + jobHistory.getId, + (elapse / 1000 / 60 / 60).toString, + jobHistory.getInstances, + MonitorConfig.SOLUTION_URL.getValue + ) + + val newNumHit = alert.numHit + 1 + val receiver = new util.HashSet[String]() + receiver.add(jobHistory.getSubmitUser) + receiver.add(jobHistory.getExecuteUser) + receiver.addAll(alert.alertReceivers) + val ImsAlertDesc = + alert.copy(alertInfo = newInfo, alertReceivers = receiver, numHit = newNumHit) + PooledImsAlertUtils.addAlert(ImsAlertDesc) + + } + } + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/jobtime/JobTimeExceedHitEvent.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/jobtime/JobTimeExceedHitEvent.scala new file mode 100644 index 0000000000..96c0b4206a --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/jobtime/JobTimeExceedHitEvent.scala @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) 
under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.jobhistory.jobtime + +import org.apache.linkis.monitor.core.ob.SingleObserverEvent + +class JobTimeExceedHitEvent extends SingleObserverEvent diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/jobtime/JobTimeExceedRule.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/jobtime/JobTimeExceedRule.scala new file mode 100644 index 0000000000..f788173e43 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/jobtime/JobTimeExceedRule.scala @@ -0,0 +1,104 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.jobhistory.jobtime + +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.monitor.constants.Constants +import org.apache.linkis.monitor.core.ob.Observer +import org.apache.linkis.monitor.core.pac.{AbstractScanRule, ScannedData} +import org.apache.linkis.monitor.jobhistory.entity.JobHistory +import org.apache.linkis.monitor.jobhistory.exception.AnomalyScannerException +import org.apache.linkis.monitor.until.CacheUtils + +import java.util +import java.util.Locale + +import scala.collection.JavaConverters._ + +/** + * Monitor the execution status of tasks, scan data outside 12 hours and within 24 hours, If within + * the scope of the rule, there is data whose status is one of (Inited, WaitForRetry, Scheduled, + * Running), an alarm will be triggered. 
+ */ +class JobTimeExceedRule(thresholds: util.Set[String], hitObserver: Observer) + extends AbstractScanRule(event = new JobTimeExceedHitEvent, observer = hitObserver) + with Logging { + + private val threshold: Long = { + if (thresholds == null) { + throw new AnomalyScannerException(21304, "thresholds should not be null") + } + var t = Long.MaxValue + for (k <- thresholds.asScala) { + if (k != null) { + if (t > k.toLong) { + t = k.toLong + } + } else { + logger.warn("ignored null input") + } + } + t + } + + private val scanRuleList = CacheUtils.cacheBuilder + + /** + * if data match the pattern, return true and trigger observer should call isMatched() + * + * @param data + * @return + */ + override def triggerIfMatched(data: util.List[ScannedData]): Boolean = { + if (!getHitEvent.isRegistered || data == null) { + logger.error("ScanRule is not bind with an observer. Will not be triggered") + return false + } + val alertData: util.List[JobHistory] = new util.ArrayList[JobHistory]() + for (sd <- data.asScala) { + if (sd != null && sd.getData() != null) { + for (d <- sd.getData().asScala) { + if (d.isInstanceOf[JobHistory]) { + val jobHistory = d.asInstanceOf[JobHistory] + val status = jobHistory.getStatus.toUpperCase(Locale.getDefault) + if (Constants.UNFINISHED_JOB_STATUS.contains(status)) { + val elapse = System.currentTimeMillis() - jobHistory.getCreatedTime.getTime + if (elapse / 1000 >= threshold) { + alertData.add(d.asInstanceOf[JobHistory]) + } + } + scanRuleList.put("jobhistoryScan", jobHistory.getId) + } else { + logger.warn("Ignored wrong input data Type : " + d + ", " + d.getClass.getCanonicalName) + } + } + } else { + logger.warn("Ignored null scanned data") + } + + } + logger.info("hit " + alertData.size() + " data in one iteration") + if (alertData.size() > 0) { + getHitEvent.notifyObserver(getHitEvent, alertData) + true + } else { + false + } + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/labels/JobHistoryLabelsAlertSender.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/labels/JobHistoryLabelsAlertSender.scala new file mode 100644 index 0000000000..7876156b1d --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/labels/JobHistoryLabelsAlertSender.scala @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.jobhistory.labels + +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.monitor.constants.Constants +import org.apache.linkis.monitor.core.ob.{Event, Observer} +import org.apache.linkis.monitor.jobhistory.entity.JobHistory +import org.apache.linkis.monitor.jobhistory.exception.AnomalyScannerException +import org.apache.linkis.monitor.utils.alert.AlertDesc +import org.apache.linkis.monitor.utils.alert.ims.{PooledImsAlertUtils, UserLabelAlertUtils} +import org.apache.linkis.server.BDPJettyServerHelper + +import java.util + +import scala.collection.JavaConverters._ +import scala.collection.mutable.ArrayBuffer + +class JobHistoryLabelsAlertSender() extends Observer with Logging { + + override def update(e: Event, jobHistoryList: scala.Any): Unit = { + if (!e.isInstanceOf[JobHistoryLabelsHitEvent]) { + throw new AnomalyScannerException( + 21304, + "Wrong event that triggers JobHistoryLabelsAlertSender. Input DataType: " + e.getClass.getCanonicalName + ) + } + if (null == jobHistoryList || !jobHistoryList.isInstanceOf[util.List[_]]) { + throw new AnomalyScannerException( + 21304, + "Wrong input for JobHistoryLabelsAlertSender. Input DataType: " + jobHistoryList.getClass.getCanonicalName + ) + } + val toSend = new ArrayBuffer[String] + for (a <- jobHistoryList.asInstanceOf[util.List[_]].asScala) { + if (a == null) { + logger.warn("Ignore null input data") + } else if (!a.isInstanceOf[JobHistory]) { + logger.warn("Ignore wrong input data Type : " + a.getClass.getCanonicalName) + } else { + val jobHistory = a.asInstanceOf[JobHistory] + toSend.append(jobHistory.getLabels) + } + } + for (str <- toSend.distinct) { + val labelsMap: util.Map[String, String] = + BDPJettyServerHelper.gson.fromJson(str, classOf[java.util.Map[String, String]]) + val alerts: util.Map[String, AlertDesc] = + UserLabelAlertUtils.getAlerts(Constants.USER_LABEL_MONITOR, labelsMap.get("userCreator")) + PooledImsAlertUtils.addAlert(alerts.get("12010")); + } + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/labels/JobHistoryLabelsHitEvent.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/labels/JobHistoryLabelsHitEvent.scala new file mode 100644 index 0000000000..d51c3c424b --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/labels/JobHistoryLabelsHitEvent.scala @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.jobhistory.labels + +import org.apache.linkis.monitor.core.ob.SingleObserverEvent + +class JobHistoryLabelsHitEvent extends SingleObserverEvent diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/labels/JobHistoryLabelsRule.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/labels/JobHistoryLabelsRule.scala new file mode 100644 index 0000000000..70d309b883 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/labels/JobHistoryLabelsRule.scala @@ -0,0 +1,112 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.jobhistory.labels + +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.monitor.constants.Constants +import org.apache.linkis.monitor.core.ob.Observer +import org.apache.linkis.monitor.core.pac.{AbstractScanRule, ScannedData} +import org.apache.linkis.monitor.jobhistory.entity.JobHistory +import org.apache.linkis.monitor.until.CacheUtils +import org.apache.linkis.server.BDPJettyServerHelper + +import org.apache.commons.lang3.StringUtils + +import java.util + +import scala.collection.JavaConverters._ + +import com.google.common.collect.HashBiMap + +/** + * Scan the execution data within the previous 20 minutes and judge the labels field of the data. + * Judgment based on monitor configuration (linkis.monitor.jobhistory.userLabel.tenant) + */ +class JobHistoryLabelsRule(hitObserver: Observer) + extends AbstractScanRule(event = new JobHistoryLabelsHitEvent, observer = hitObserver) + with Logging { + + private val scanRuleList = CacheUtils.cacheBuilder + + /** + * if data match the pattern, return true and trigger observer should call isMatched() + * + * @param data + * @return + */ + override def triggerIfMatched(data: util.List[ScannedData]): Boolean = { + if (!getHitEvent.isRegistered || null == data) { + logger.error("ScanRule is not bind with an observer. 
Will not be triggered") + return false + } + val alertData: util.List[JobHistory] = new util.ArrayList[JobHistory]() + for (sd <- data.asScala) { + if (sd != null && sd.getData() != null) { + for (d <- sd.getData().asScala) { + if (d.isInstanceOf[JobHistory]) { + logger.info(" start jobhistory user label rule data : {}", d) + val jobHistory = d.asInstanceOf[JobHistory] + val labels = jobHistory.getLabels + val labelsMap: util.Map[String, String] = + BDPJettyServerHelper.gson.fromJson(labels, classOf[java.util.Map[String, String]]) + val userCreator = labelsMap.get("userCreator"); + val tenant = labelsMap.get("tenant"); + if (StringUtils.isNotBlank(userCreator)) { + val configMap = BDPJettyServerHelper.gson.fromJson( + Constants.USER_LABEL_TENANT.getValue, + classOf[java.util.Map[String, String]] + ) + val listIterator = configMap.keySet.iterator + while ({ + listIterator.hasNext + }) { + val next = listIterator.next + if (userCreator.contains(next)) { + val value = configMap.get(next) + if (!value.equals(tenant)) { + alertData.add(d.asInstanceOf[JobHistory]) + } + } + } + if (configMap.values().contains(tenant)) { + val bimap: HashBiMap[String, String] = HashBiMap.create(configMap) + val key = bimap.inverse().get(tenant) + if (!key.contains(userCreator)) { + alertData.add(d.asInstanceOf[JobHistory]) + } + } + } + scanRuleList.put("jobHistoryId", jobHistory.getId) + } else { + logger.warn("Ignored wrong input data Type : " + d + ", " + d.getClass.getCanonicalName) + } + } + } else { + logger.warn("Ignored null scanned data") + } + } + logger.info("hit " + alertData.size() + " data in one iteration") + if (alertData.size() > 0) { + getHitEvent.notifyObserver(getHitEvent, alertData) + true + } else { + false + } + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/CommonJobRunTimeRule.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/CommonJobRunTimeRule.scala new file mode 100644 index 0000000000..77d904fe4d --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/CommonJobRunTimeRule.scala @@ -0,0 +1,83 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.linkis.monitor.jobhistory.runtime
+
+import org.apache.linkis.common.utils.Logging
+import org.apache.linkis.monitor.constants.Constants
+import org.apache.linkis.monitor.core.ob.Observer
+import org.apache.linkis.monitor.core.pac.{AbstractScanRule, ScannedData}
+import org.apache.linkis.monitor.jobhistory.entity.JobHistory
+
+import org.apache.commons.lang3.StringUtils
+
+import java.util
+
+import scala.collection.JavaConverters._
+
+/**
+ * Scans the execution data generated within the previous 20 minutes and triggers an alarm
+ * only when both of the following conditions are met:
+ * 1. the ObserveInfo field of the record is not empty;
+ * 2. the task has reached a final status (Succeed, Failed, Cancelled, Timeout, ALL).
+ */
+class CommonJobRunTimeRule(hitObserver: Observer)
+    extends AbstractScanRule(event = new JobHistoryRunTimeHitEvent, observer = hitObserver)
+    with Logging {
+
+  /**
+   * if data match the pattern, return true and trigger observer should call isMatched()
+   *
+   * @param data
+   * @return
+   */
+  override def triggerIfMatched(data: util.List[ScannedData]): Boolean = {
+    if (!getHitEvent.isRegistered || null == data) {
+      logger.error("ScanRule is not bound to an observer. Will not be triggered")
+      return false
+    }
+    val alertData: util.List[JobHistory] = new util.ArrayList[JobHistory]()
+    for (sd <- data.asScala) {
+      if (sd != null && sd.getData() != null) {
+        for (d <- sd.getData().asScala) {
+          d match {
+            case jobHistory: JobHistory =>
+              if (
+                Constants.FINISHED_JOB_STATUS.contains(jobHistory.getStatus.toUpperCase())
+                && StringUtils.isNotBlank(jobHistory.getObserveInfo)
+              ) {
+                alertData.add(jobHistory)
+              } else {
+                logger.warn("jobHistory is not completed, taskid: " + d)
+              }
+            case _ =>
+          }
+        }
+      } else {
+        logger.warn("Ignored null scanned data")
+      }
+    }
+    logger.info("hit " + alertData.size() + " data in one iteration")
+    if (alertData.size() > 0) {
+      getHitEvent.notifyObserver(getHitEvent, alertData)
+      true
+    } else {
+      false
+    }
+  }
+
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/CommonRunTimeAlertSender.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/CommonRunTimeAlertSender.scala
new file mode 100644
index 0000000000..2380891463
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/CommonRunTimeAlertSender.scala
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.apache.linkis.monitor.jobhistory.runtime + +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.monitor.constants.Constants +import org.apache.linkis.monitor.core.ob.{Event, Observer} +import org.apache.linkis.monitor.jobhistory.entity.JobHistory +import org.apache.linkis.monitor.jobhistory.exception.AnomalyScannerException +import org.apache.linkis.monitor.utils.alert.ims.{MonitorAlertUtils, PooledImsAlertUtils} +import org.apache.linkis.server.BDPJettyServerHelper + +import org.apache.commons.collections.MapUtils + +import java.net.InetAddress +import java.text.SimpleDateFormat +import java.util +import java.util.Date + +import scala.collection.JavaConverters._ + +class CommonRunTimeAlertSender() extends Observer with Logging { + private val dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss") + + override def update(e: Event, jobHistoryList: scala.Any): Unit = { + if (!e.isInstanceOf[JobHistoryRunTimeHitEvent]) { + throw new AnomalyScannerException( + 21304, + "Wrong event that triggers JobHistoryErrorCodeAlertSender. Input DataType: " + e.getClass.getCanonicalName + ) + } + if (!jobHistoryList.isInstanceOf[util.List[_]] || null == jobHistoryList) { + throw new AnomalyScannerException( + 21304, + "Wrong input for JobHistoryErrorCodeAlertSender. Input DataType: " + jobHistoryList.getClass.getCanonicalName + ) + } + for (a <- jobHistoryList.asInstanceOf[util.List[_]].asScala) { + if (a == null) { + logger.warn("Ignore null input data") + } else if (!a.isInstanceOf[JobHistory]) { + logger.warn("Ignore wrong input data Type : " + a.getClass.getCanonicalName) + } else { + val jobHistory = a.asInstanceOf[JobHistory] + val observeInfoMap = BDPJettyServerHelper.gson.fromJson( + jobHistory.getObserveInfo, + classOf[java.util.Map[String, String]] + ) + val extraMap = MapUtils.getMap(observeInfoMap, "extra") + observeInfoMap.put( + "title", + extraMap + .get("title") + .toString + ",任务id:" + jobHistory.getId + ",执行结果 :" + jobHistory.getStatus + ) + observeInfoMap.put( + "$detail", + extraMap.get("detail").toString + ",执行结果 :" + jobHistory.getStatus + ) + observeInfoMap.put("$submitUser", jobHistory.getSubmitUser) + observeInfoMap.put("$status", jobHistory.getStatus) + observeInfoMap.put("$id", jobHistory.getId.toString) + observeInfoMap.put("$date", dateFormat.format(new Date())) + var alterSysInfo = "" + if (null != extraMap.get("alterSysInfo")) { + alterSysInfo = extraMap.get("alterSysInfo").toString + } + observeInfoMap.put("$sysid", alterSysInfo) + var alterObject = "" + if (null != extraMap.get("alterObject")) { + alterObject = extraMap.get("alterObject").toString + } + observeInfoMap.put("$object", alterObject) + observeInfoMap.put("$ip", InetAddress.getLocalHost.getHostAddress) + observeInfoMap.remove("taskId") + observeInfoMap.remove("extra") + val alters = MonitorAlertUtils.getAlerts(Constants.JOB_RESULT_IM, observeInfoMap) + PooledImsAlertUtils.addAlert(alters.get("12016")) + } + } + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/CommonRunTimeHitEvent.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/CommonRunTimeHitEvent.scala new file mode 100644 index 0000000000..8b2f6d41e1 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/CommonRunTimeHitEvent.scala @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor 
license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.jobhistory.runtime + +import org.apache.linkis.monitor.core.ob.SingleObserverEvent + +class CommonRunTimeHitEvent extends SingleObserverEvent diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/JobHistoryRunTimeAlertSender.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/JobHistoryRunTimeAlertSender.scala new file mode 100644 index 0000000000..a1e870c7c5 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/JobHistoryRunTimeAlertSender.scala @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.jobhistory.runtime + +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.monitor.constants.Constants +import org.apache.linkis.monitor.core.ob.{Event, Observer} +import org.apache.linkis.monitor.jobhistory.entity.JobHistory +import org.apache.linkis.monitor.jobhistory.exception.AnomalyScannerException +import org.apache.linkis.monitor.utils.alert.ims.{MonitorAlertUtils, PooledImsAlertUtils} + +import java.util + +import scala.collection.JavaConverters._ + +/** + * Scan the execution data within the first 20 minutes, judge the completed tasks, + * 1. The parm field in jobhistory contains (task.notification.conditions) 2. If the result of + * executing the task is any one of (Succeed, Failed, Canceled, Timeout, ALL), an alarm will be + * triggered 3.The result of the job is that it has ended The alarm can be triggered if the + * above three conditions are met at the same time + */ +class JobHistoryRunTimeAlertSender() extends Observer with Logging { + + override def update(e: Event, jobHistroyList: scala.Any): Unit = { + if (!e.isInstanceOf[JobHistoryRunTimeHitEvent]) { + throw new AnomalyScannerException( + 21304, + "Wrong event that triggers JobHistoryErrorCodeAlertSender. 
Input DataType: " + e.getClass.getCanonicalName + ) + } + if (null == jobHistroyList || !jobHistroyList.isInstanceOf[util.List[_]]) { + throw new AnomalyScannerException( + 21304, + "Wrong input for JobHistoryErrorCodeAlertSender. Input DataType: " + jobHistroyList.getClass.getCanonicalName + ) + } + for (a <- jobHistroyList.asInstanceOf[util.List[_]].asScala) { + if (a == null) { + logger.warn("Ignore null input data") + } else if (!a.isInstanceOf[JobHistory]) { + logger.warn("Ignore wrong input data Type : " + a.getClass.getCanonicalName) + } else { + // 您的任务ID 1234 执行完成,最终状态为:成功、失败、取消 + val jobHistory = a.asInstanceOf[JobHistory] + val status = jobHistory.getStatus + val replaceParm: util.HashMap[String, String] = new util.HashMap[String, String] + replaceParm.put("$id", String.valueOf(jobHistory.getId)) + replaceParm.put("$status", status) + replaceParm.put("$alteruser", jobHistory.getSubmitUser) + val alters = MonitorAlertUtils.getAlerts(Constants.JOB_RESULT_IM, replaceParm) + PooledImsAlertUtils.addAlert(alters.get("12015")) + } + } + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/JobHistoryRunTimeHitEvent.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/JobHistoryRunTimeHitEvent.scala new file mode 100644 index 0000000000..9daaf0236e --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/JobHistoryRunTimeHitEvent.scala @@ -0,0 +1,22 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.jobhistory.runtime + +import org.apache.linkis.monitor.core.ob.SingleObserverEvent + +class JobHistoryRunTimeHitEvent extends SingleObserverEvent diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/JobHistoryRunTimeRule.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/JobHistoryRunTimeRule.scala new file mode 100644 index 0000000000..d350bc3ace --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/jobhistory/runtime/JobHistoryRunTimeRule.scala @@ -0,0 +1,91 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.jobhistory.runtime + +import org.apache.linkis.common.utils.Logging +import org.apache.linkis.monitor.constants.Constants +import org.apache.linkis.monitor.core.ob.Observer +import org.apache.linkis.monitor.core.pac.{AbstractScanRule, ScannedData} +import org.apache.linkis.monitor.jobhistory.entity.JobHistory +import org.apache.linkis.monitor.until.CacheUtils +import org.apache.linkis.protocol.utils.TaskUtils +import org.apache.linkis.server.BDPJettyServerHelper + +import java.util + +import scala.collection.JavaConverters._ + +class JobHistoryRunTimeRule(hitObserver: Observer) + extends AbstractScanRule(event = new JobHistoryRunTimeHitEvent, observer = hitObserver) + with Logging { + private val scanRuleList = CacheUtils.cacheBuilder + + /** + * if data match the pattern, return true and trigger observer should call isMatched() + * + * @param data + * @return + */ + override def triggerIfMatched(data: util.List[ScannedData]): Boolean = { + if (null == data || !getHitEvent.isRegistered) { + logger.error("ScanRule is not bind with an observer. Will not be triggered") + return false + } + val alertData: util.List[JobHistory] = new util.ArrayList[JobHistory]() + for (sd <- data.asScala) { + if (sd != null && sd.getData() != null) { + for (d <- sd.getData().asScala) { + d match { + case jobHistory: JobHistory => + if (Constants.FINISHED_JOB_STATUS.contains(jobHistory.getStatus.toUpperCase())) { + val parmsMap: util.Map[String, scala.AnyRef] = BDPJettyServerHelper.gson.fromJson( + jobHistory.getParams, + classOf[util.Map[String, scala.AnyRef]] + ) + val runtimeMap = TaskUtils.getRuntimeMap(parmsMap) + if ( + runtimeMap.containsKey("task.notification.conditions") && + Constants.FINISHED_JOB_STATUS.contains( + String.valueOf(runtimeMap.get("task.notification.conditions")).toUpperCase() + ) + ) { + alertData.add(jobHistory) + } + } else { + logger.warn( + "Ignored wrong input data Type : " + d + ", " + d.getClass.getCanonicalName + ) + } + scanRuleList.put("jobHistoryId", jobHistory.getId) + case _ => + } + } + } else { + logger.warn("Ignored null scanned data") + } + } + logger.info("hit " + alertData.size() + " data in one iteration") + if (alertData.size() > 0) { + getHitEvent.notifyObserver(getHitEvent, alertData) + true + } else { + false + } + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/EmsListAction.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/EmsListAction.scala new file mode 100644 index 0000000000..6f3158e869 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/EmsListAction.scala @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.request + +import org.apache.linkis.httpclient.request.GetAction + +import org.apache.commons.lang3.StringUtils + +import scala.collection.mutable.ArrayBuffer + +class EmsListAction extends GetAction with MonitorResourceAction { + + override def suffixURLs: Array[String] = Array("linkisManager", "listAllEMs") + +} + +object EmsListAction { + def newBuilder(): Builder = new Builder + + class Builder private[EmsListAction] () { + private var user: String = _ + private var instance: String = _ + private var nodeHealthy: String = _ + private var owner: String = _ + + def setInstance(instance: String): Builder = { + this.instance = instance + this + } + + def setNodeHealthy(nodeHealthy: String): Builder = { + this.nodeHealthy = nodeHealthy + this + } + + def setOwner(owner: String): Builder = { + this.owner = owner + this + } + + def setUser(user: String): Builder = { + this.user = user + this + } + + def build(): EmsListAction = { + val emsListAction = new EmsListAction + if (StringUtils.isNotBlank(instance)) emsListAction.setParameter("instance", instance) + if (StringUtils.isNotBlank(nodeHealthy)) { + emsListAction.setParameter("nodeHealthy", nodeHealthy) + } + if (StringUtils.isNotBlank(owner)) emsListAction.setParameter("owner", owner) + if (StringUtils.isNotBlank(user)) emsListAction.setUser(user) + emsListAction + } + + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/EntranceTaskAction.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/EntranceTaskAction.scala new file mode 100644 index 0000000000..f3175d802f --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/EntranceTaskAction.scala @@ -0,0 +1,77 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.linkis.monitor.request
+
+import org.apache.linkis.httpclient.request.GetAction
+
+import org.apache.commons.lang3.StringUtils
+
+class EntranceTaskAction extends GetAction with MonitorResourceAction {
+  override def suffixURLs: Array[String] = Array("entrance/operation/metrics", "taskinfo")
+}
+
+object EntranceTaskAction {
+  def newBuilder(): Builder = new Builder
+
+  class Builder private[EntranceTaskAction] () {
+    private var user: String = _
+    private var creator: String = _
+    private var engineTypeLabel: String = _
+    private var instance: String = _
+
+    def setCreator(creator: String): Builder = {
+      this.creator = creator
+      this
+    }
+
+    def setEngineTypeLabel(engineTypeLabel: String): Builder = {
+      this.engineTypeLabel = engineTypeLabel
+      this
+    }
+
+    def setUser(user: String): Builder = {
+      this.user = user
+      this
+    }
+
+    def setInstance(instance: String): Builder = {
+      this.instance = instance
+      this
+    }
+
+    def build(): EntranceTaskAction = {
+      val entranceTaskAction = new EntranceTaskAction
+      if (StringUtils.isNotBlank(creator)) entranceTaskAction.setParameter("creator", creator)
+      if (StringUtils.isNotBlank(engineTypeLabel))
+        entranceTaskAction.setParameter("engineTypeLabel", engineTypeLabel)
+      if (StringUtils.isNotBlank(instance)) entranceTaskAction.setParameter("instance", instance)
+      if (StringUtils.isNotBlank(user)) {
+        // The hadoop user should see entrance information of all users, so no user parameter is passed and all entrance information is returned
+        if (user.equals("hadoop")) {
+          entranceTaskAction.setParameter("user", "")
+        } else {
+          entranceTaskAction.setParameter("user", user)
+        }
+      }
+      if (StringUtils.isNotBlank(user)) entranceTaskAction.setUser(user)
+      entranceTaskAction
+    }
+
+  }
+
+}
diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/MonitorResourceAction.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/MonitorResourceAction.scala
new file mode 100644
index 0000000000..7ea2001481
--- /dev/null
+++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/MonitorResourceAction.scala
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +package org.apache.linkis.monitor.request + +import org.apache.linkis.httpclient.dws.request.DWSHttpAction + +trait MonitorResourceAction extends DWSHttpAction with UserAction diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/UserAction.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/UserAction.scala new file mode 100644 index 0000000000..4733a1b45f --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/request/UserAction.scala @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.request + +trait UserAction extends org.apache.linkis.httpclient.request.UserAction { + private var user: String = _ + + override def setUser(user: String): Unit = this.user = user + + override def getUser: String = user +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/response/EntranceTaskResult.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/response/EntranceTaskResult.scala new file mode 100644 index 0000000000..33c695ca25 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/response/EntranceTaskResult.scala @@ -0,0 +1,36 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.response + +import org.apache.linkis.httpclient.dws.annotation.DWSHttpMessageResult +import org.apache.linkis.httpclient.dws.response.DWSResult + +import java.util + +import scala.beans.BeanProperty + +@DWSHttpMessageResult("/api/rest_j/v\\d+/entrance/operation/metrics/taskinfo") +class EntranceTaskResult extends DWSResult { + + @BeanProperty + var tasks: util.ArrayList[util.Map[String, Object]] = _ + + @BeanProperty + var totalPage: Int = _ + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/response/MonitorResourceResult.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/response/MonitorResourceResult.scala new file mode 100644 index 0000000000..1c12662e2e --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/response/MonitorResourceResult.scala @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.response + +import org.apache.linkis.httpclient.dws.response.DWSResult +import org.apache.linkis.httpclient.request.UserAction + +trait MonitorResourceResult extends DWSResult with UserAction { + + private var execID: String = _ + + def getExecID: String = execID + + def setExecID(execID: String): Unit = { + this.execID = execID + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/ScanUtils.java b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/ScanUtils.java new file mode 100644 index 0000000000..5c5566c6f9 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/ScanUtils.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.utils; + + +public class ScanUtils { + public static int getNumOfLines(String str) { + if (str == null || str.length() == 0) { + return 0; + } + int lines = 1; + int len = str.length(); + for (int pos = 0; pos < len; pos++) { + char c = str.charAt(pos); + if (c == '\r') { + lines++; + if (pos + 1 < len && str.charAt(pos + 1) == '\n') { + pos++; + } + } else if (c == '\n') { + lines++; + } + } + return lines; + } + + public static int getFirstIndexSkippingLines(String str, Integer lines) { + if (str == null || str.length() == 0 || lines < 0) { + return -1; + } + if (lines == 0) { + return 0; + } + + int curLineIdx = 0; + int len = str.length(); + for (int pos = 0; pos < len; pos++) { + char c = str.charAt(pos); + if (c == '\r') { + curLineIdx++; + if (pos + 1 < len && str.charAt(pos + 1) == '\n') { + pos++; + } + } else if (c == '\n') { + curLineIdx++; + } else { + continue; + } + + if (curLineIdx >= lines) { + return pos + 1; + } + } + return -1; + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/AlertDesc.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/AlertDesc.scala new file mode 100644 index 0000000000..8a3be387e9 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/AlertDesc.scala @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.utils.alert + +trait AlertDesc { + + /** + * define necessary information for an alert e.g. alert title, alert receiver etc. + */ +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/AlertSender.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/AlertSender.scala new file mode 100644 index 0000000000..68ec0f609f --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/AlertSender.scala @@ -0,0 +1,29 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.utils.alert + +trait AlertSender { + + /** + * traverse all registered alertActions and send alert + * + * @return + * true if it is a success + */ + def doSendAlert(alertAction: AlertDesc): Boolean +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/PooledAlertSender.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/PooledAlertSender.scala new file mode 100644 index 0000000000..6214b633ac --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/PooledAlertSender.scala @@ -0,0 +1,111 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.utils.alert + +import org.apache.linkis.common.conf.CommonVars +import org.apache.linkis.common.utils.{Logging, Utils} + +import java.util.concurrent.{Future, LinkedBlockingQueue} +import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger} + +abstract class PooledAlertSender extends AlertSender with Logging { + private val THREAD_POOL_SIZE = CommonVars[Int]("linkis.alert.pool.size", 5).getValue + + private val alertDescQ: LinkedBlockingQueue[AlertDesc] = + new LinkedBlockingQueue[AlertDesc](1000) + + protected implicit val executors = + Utils.newCachedExecutionContext(THREAD_POOL_SIZE, "alert-pool-thread-", false) + + private val stopped: AtomicBoolean = new AtomicBoolean(false) + private val runningNumber: AtomicInteger = new AtomicInteger(0) + private var future: Future[_] = _ + + /** + * add an alertDesc to queue + * + * @param alertDesc + * should encapsulates every information an alert platform needs for sending an alarm + */ + def addAlertToPool(alertDesc: AlertDesc): Unit = { + alertDescQ.add(alertDesc) + } + + /** + * describes actual actions for sending an alert + * + * @return + * true if it is a success + */ + override def doSendAlert(alertDesc: AlertDesc): Boolean + + def start(): Unit = { + future = Utils.defaultScheduler.submit(new Runnable() { + override def run() { + logger.info("Pooled alert thread started!") + while (!stopped.get) { + executors synchronized { + while (!stopped.get && runningNumber.get >= THREAD_POOL_SIZE) { + logger.info("Pooled alert thread is full, start waiting") + executors.wait() + } + } + logger.info("Pooled alert thread continue processing") + + if (stopped.get && alertDescQ.size() == 0) return + val alertDesc = Utils.tryQuietly(alertDescQ.take) + if (alertDesc == null) return + executors.submit(new Runnable { + override def run() { + runningNumber.addAndGet(1) + Utils.tryAndWarn { + logger.info("sending alert , information: " + 
alertDesc) + val ok = doSendAlert(alertDesc) + if (!ok) { + warn("Failed to send alert: " + alertDesc) + } else { + logger.info("successfully send alert: " + alertDesc) + } + runningNumber.decrementAndGet + executors synchronized executors.notify + } + } + }) + } + } + }) + } + + def shutdown(waitComplete: Boolean = true, timeoutMs: Long = -1): Unit = { + logger.info("stopping the Pooled alert thread...") + if (waitComplete) { + val startTime = System.currentTimeMillis() + while ( + (alertDescQ.size() > 0 || runningNumber + .get() > 0) && (timeoutMs == -1 || System.currentTimeMillis() - startTime > timeoutMs) + ) { + Utils.tryQuietly(Thread.sleep(5 * 1000L)) + } + } + executors.shutdown + stopped.set(true) + future.cancel(true) + logger.info("Pooled alert thread is stopped") + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertDesc.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertDesc.scala new file mode 100644 index 0000000000..06ef57f629 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertDesc.scala @@ -0,0 +1,154 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+package org.apache.linkis.monitor.utils.alert.ims
+
+import org.apache.linkis.monitor.constants.Constants
+import org.apache.linkis.monitor.utils.ScanUtils
+import org.apache.linkis.monitor.utils.alert.AlertDesc
+
+import org.apache.commons.collections.CollectionUtils
+import org.apache.commons.lang3.StringUtils
+
+import java.util
+import java.util.HashSet
+
+import scala.collection.JavaConverters._
+
+import ImsAlertLevel.ImsAlertLevel
+import ImsAlertWay.ImsAlertWay
+
+case class ImsAlertDesc(
+    var subSystemId: String,
+    var alertTitle: String,
+    var alertObj: String,
+    var alertInfo: String,
+    alertLevel: ImsAlertLevel = ImsAlertLevel.INFO,
+    alertIp: String,
+    canRecover: Int = 0, // defaults to 0; when set to 1, a matching recovery alert must be sent
+    alertWays: util.Set[ImsAlertWay] = new HashSet[ImsAlertWay],
+    var alertReceivers: util.Set[String] = new HashSet[String],
+    var numHit: Int = 0,
+    var hitIntervalMs: Long = 0L
+) extends AlertDesc {
+
+  override val toString: String = {
+    val sb = new StringBuilder
+    sb.append("sub_system_id=").append(subSystemId).append("&alert_title=").append(alertTitle)
+    if (alertLevel != null) sb.append("&alert_level=").append(alertLevel.toString)
+    if (StringUtils.isNotEmpty(alertObj)) sb.append("&alert_obj=").append(alertObj)
+    if (StringUtils.isNotEmpty(alertInfo)) {
+      sb.append("&alert_info=")
+        .append(alertInfo)
+        .append(
+          "[freq_info] hit " + numHit + " time(s) within " + hitIntervalMs / 1000 / 60 + " mins"
+        )
+    }
+    if (canRecover == 0 || canRecover == 1) sb.append("&can_recover=").append(canRecover)
+    if (alertWays != null && alertWays.size > 0) {
+      sb.append("&alert_way=")
+      sb.append(alertWays.asScala.map(_.toString).mkString(","))
+    }
+    if (alertReceivers != null && alertReceivers.size > 0) {
+      sb.append("&alert_reciver=")
+      sb.append(alertReceivers.asScala.mkString(","))
+    }
+    if (alertIp != null) {
+      sb.append("&alert_ip=").append(alertIp)
+
+    }
+    sb.toString
+  }
+
+  val toMap: Map[String, String] = {
+    val map = scala.collection.mutable.Map[String, String]()
+    map += "sub_system_id" -> subSystemId
+    map += "alert_title" -> alertTitle
+    if (alertLevel != null) map += "alert_level" -> alertLevel.toString
+    if (StringUtils.isNotEmpty(alertObj)) map += "alert_obj" -> alertObj
+    if (StringUtils.isNotEmpty(alertInfo)) {
+      map += "alert_info" -> (alertInfo + "[freq_info] hit " + numHit + " time(s) within " + hitIntervalMs / 1000 / 60 + " mins")
+    }
+    if (canRecover == 0 || canRecover == 1) map += "can_recover" -> canRecover.toString
+    if (alertWays != null && alertWays.size > 0) {
+      map += "alert_way" -> alertWays.asScala.map(_.toString).mkString(",")
+    }
+    if (alertReceivers != null && alertReceivers.size > 0) {
+      map += "alert_reciver" -> alertReceivers.asScala.mkString(",")
+    }
+    map.toMap
+  }
+
+  val toImsRequest: ImsRequest = {
+    val params = validate()
+    val alertEntity = AlertEntity(
+      params(0).asInstanceOf[String],
+      params(1).asInstanceOf[String],
+      params(
+        3
+      ) + "[freq_info] hit " + numHit + " time(s) within " + hitIntervalMs / 1000 / 60 + " mins",
+      alertWays.asScala.map(_.toString).mkString(","),
+      params(4).asInstanceOf[util.Set[String]].asScala.mkString(","),
+      alertLevel.toString,
+      params(2).asInstanceOf[String],
+      canRecover.toString
+    )
+
+    val alertEntityList = new util.ArrayList[AlertEntity]
+    alertEntityList.add(alertEntity)
+
+    ImsRequest(alertEntityList)
+  }
+
+  def validate(): Array[Any] = {
+    assert(StringUtils.isNumeric(subSystemId) && subSystemId.length == 4)
+    assert(StringUtils.isNotEmpty(alertTitle))
+    val newAlertTitle = if (alertTitle.length
> 100) { + alertTitle.substring(0, 96) + "... ..." + } else { + alertTitle + } + val newAlertObj = if (StringUtils.isNotEmpty(alertObj) && alertObj.length >= 50) { + alertObj = alertObj.substring(0, 36) + "... ..." + } else { + alertObj + } + val newAlertInfo = + if ( + StringUtils.isNotEmpty(alertInfo) && ScanUtils.getNumOfLines( + alertInfo + ) > Constants.ALERT_IMS_MAX_LINES + ) { + StringUtils.substring( + alertInfo, + 0, + ScanUtils.getFirstIndexSkippingLines(alertInfo, Constants.ALERT_IMS_MAX_LINES) + ) + "... ...\n" + } else { + alertInfo + } + val newAlertReceivers = + if (CollectionUtils.isNotEmpty(alertReceivers) && alertReceivers.size > 15) { + alertReceivers.asScala.take(15) + } else { + alertReceivers + } + + Array(subSystemId, newAlertTitle, newAlertObj, newAlertInfo, newAlertReceivers) + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertLevel.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertLevel.scala new file mode 100644 index 0000000000..10801de03e --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertLevel.scala @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.utils.alert.ims + +object ImsAlertLevel extends Enumeration { + type ImsAlertLevel = Value + val INFO = Value("5") + val WARN = Value("4") + val MINOR = Value("3") + val MAJOR = Value("2") + val CRITICAL = Value("1") + val CLEAR = Value("0") +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertPropFileData.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertPropFileData.scala new file mode 100644 index 0000000000..1166453b87 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertPropFileData.scala @@ -0,0 +1,30 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.utils.alert.ims + +import com.fasterxml.jackson.annotation.JsonProperty + +case class ImsAlertPropFileData( + @JsonProperty("alert_title") alertTitle: String, + @JsonProperty("alert_info") alertInfo: String, + @JsonProperty("alert_way") alertWays: String, + @JsonProperty("alert_reciver") alertReceivers: String, + @JsonProperty("alert_level") alertLevel: String, + @JsonProperty("alert_obj") alertObj: String, + @JsonProperty("can_recover") canRecover: String +) diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertWay.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertWay.scala new file mode 100644 index 0000000000..7f26c705a0 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsAlertWay.scala @@ -0,0 +1,26 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.utils.alert.ims + +object ImsAlertWay extends Enumeration { + type ImsAlertWay = Value + val NoAlert = Value("0") + val WXWork = Value("1") + val Email = Value("2") + val WeChat = Value("3") +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsRequest.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsRequest.scala new file mode 100644 index 0000000000..e497b1a41a --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/ImsRequest.scala @@ -0,0 +1,35 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.utils.alert.ims + +import java.util + +import com.fasterxml.jackson.annotation.JsonProperty + +case class ImsRequest(@JsonProperty("alertList") alertList: util.List[AlertEntity]) + +case class AlertEntity( + @JsonProperty("sub_system_id") subSystemId: String, + @JsonProperty("alert_title") alertTitle: String, + @JsonProperty("alert_info") alertInfo: String, + @JsonProperty("alert_way") alertWays: String, + @JsonProperty("alert_reciver") alertReceivers: String, + @JsonProperty("alert_level") alertLevel: String, + @JsonProperty("alert_obj") alertObj: String, + @JsonProperty("can_recover") canRecover: String +) diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/MonitorAlertUtils.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/MonitorAlertUtils.scala new file mode 100644 index 0000000000..67c1b0358f --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/MonitorAlertUtils.scala @@ -0,0 +1,157 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.utils.alert.ims + +import org.apache.linkis.common.utils.{JsonUtils, Logging, Utils} +import org.apache.linkis.monitor.constants.Constants +import org.apache.linkis.monitor.jobhistory.exception.AnomalyScannerException +import org.apache.linkis.monitor.utils.alert.AlertDesc + +import org.apache.commons.io.IOUtils +import org.apache.commons.lang3.StringUtils +import org.apache.commons.lang3.exception.ExceptionUtils + +import java.io.{BufferedReader, File, FileInputStream, InputStream, InputStreamReader} +import java.text.SimpleDateFormat +import java.util +import java.util.Properties + +import scala.collection.JavaConverters._ + +import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper} +import com.fasterxml.jackson.module.scala.DefaultScalaModule + +object MonitorAlertUtils extends Logging { + + private val mapper = { + val ret = new ObjectMapper().setDateFormat(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ")) + ret.registerModule(DefaultScalaModule) + ret.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) + ret + } + + val properties = { + val url = getClass.getClassLoader.getResource(Constants.ALERT_PROPS_FILE_PATH) + if (url == null) { + throw new AnomalyScannerException( + 21304, + "Failed to load alerts from alert properties. 
Alert properties file does not exist: " + Constants.ALERT_PROPS_FILE_PATH + ) + } + logger.info("reading alert properties from: " + url.getFile) + val properties = new Properties() + var inputStream: InputStream = null + var reader: InputStreamReader = null + var buff: BufferedReader = null + Utils.tryFinally { + Utils.tryCatch { + inputStream = new FileInputStream(new File(url.getFile)) + reader = new InputStreamReader(inputStream, "UTF-8") + buff = new BufferedReader(reader) + properties.load(buff) + } { t => + { + throw new AnomalyScannerException( + 21304, + "Failed to load alerts from alert properties. Cause: " + ExceptionUtils.getMessage(t) + ) + } + } + } { + IOUtils.closeQuietly(buff) + IOUtils.closeQuietly(reader) + IOUtils.closeQuietly(inputStream) + } + properties.asScala + } + + def getAlerts(prefix: String, params: util.Map[String, String]): util.Map[String, AlertDesc] = { + val ret = new util.HashMap[String, AlertDesc]() + + for ((k: String, v: String) <- properties) { + if (ret.containsKey(k)) { + logger.warn("found duplicate key in alert properties, accept only the first one") + } else if (StringUtils.startsWith(k, prefix)) { + val data = mapper.readValue(v, classOf[ImsAlertPropFileData]) + var alertInfo = new String( + new StringBuilder().append(data.alertInfo).toString().getBytes(), + "utf-8" + ).replace("$name", data.alertReceivers) + val interator = params.keySet.iterator + while (interator.hasNext) { + val key = interator.next + val value = params.get(key) + alertInfo = alertInfo.replace(key, value) + } + val receivers = { + val set: util.Set[String] = new util.HashSet[String] + if (StringUtils.isNotBlank(data.alertReceivers)) { + data.alertReceivers.split(",").map(r => set.add(r)) + } + if (!params.containsKey("$alteruser")) { + Constants.ALERT_DEFAULT_RECEIVERS.foreach(e => { + if (StringUtils.isNotBlank(e)) { + set.add(e) + } + }) + } else { + set.add(params.get("$alteruser")) + } + if (StringUtils.isNotBlank(params.get("receiver"))) { + params.get("receiver").split(",").map(r => set.add(r)) + } + set + } + + val subSystemId = params.getOrDefault("subSystemId", Constants.ALERT_SUB_SYSTEM_ID) + val alertTitle = params.getOrDefault("title", data.alertTitle) + val alertLevel = + if (StringUtils.isNotBlank(data.alertLevel)) { + ImsAlertLevel.withName(params.getOrDefault("monitorLevel", data.alertLevel)) + } else { + ImsAlertLevel.withName(params.getOrDefault("monitorLevel", ImsAlertLevel.WARN.toString)) + } + + val alertDesc = Utils.tryAndWarn( + ImsAlertDesc( + subSystemId, + alertTitle, + data.alertObj, + alertInfo, + alertLevel, + null, + 0, { + val set: util.Set[ImsAlertWay.Value] = new util.HashSet[ImsAlertWay.Value] + if (StringUtils.isNotBlank(data.alertWays)) { + data.alertWays + .split(",") + .map(alertWayStr => set.add(ImsAlertWay.withName(alertWayStr))) + } + set + }, + receivers + ) + ) + val realK = StringUtils.substringAfter(k, prefix) + ret.put(realK, alertDesc) + } + } + ret + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/PooledImsAlertSender.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/PooledImsAlertSender.scala new file mode 100644 index 0000000000..70bd15567e --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/PooledImsAlertSender.scala @@ -0,0 +1,104 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.linkis.monitor.utils.alert.ims
+
+import org.apache.linkis.common.utils.{JsonUtils, Logging, Utils}
+import org.apache.linkis.monitor.utils.alert.{AlertDesc, PooledAlertSender}
+import org.apache.linkis.monitor.utils.log.LogUtils
+
+import org.apache.http.client.config.RequestConfig
+import org.apache.http.client.methods.HttpPost
+import org.apache.http.entity.{ContentType, StringEntity}
+import org.apache.http.impl.client.HttpClients
+import org.apache.http.util.EntityUtils
+
+import java.text.SimpleDateFormat
+import java.util
+
+import com.fasterxml.jackson.databind.ObjectMapper
+import com.fasterxml.jackson.module.scala.DefaultScalaModule
+
+class PooledImsAlertSender(alertUrl: String) extends PooledAlertSender with Logging {
+
+  protected val httpClient = HttpClients.createDefault
+
+  private val mapper =
+    new ObjectMapper().setDateFormat(new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssZ"))
+
+  /**
+   * describes actual actions for sending an alert
+   *
+   * @return
+   *   true if it is a success
+   */
+  override def doSendAlert(alertDesc: AlertDesc): Boolean = {
+    if (!alertDesc.isInstanceOf[ImsAlertDesc]) {
+      logger.warn("wrong alertDesc dataType: " + alertDesc.getClass.getCanonicalName)
+      return false
+    }
+    logger.info("sending an alert to IMS, information: " + alertDesc)
+    val imsRequest = alertDesc.asInstanceOf[ImsAlertDesc].toImsRequest
+
+    mapper.registerModule(DefaultScalaModule)
+    val paramContent = Utils.tryCatch(mapper.writeValueAsString(imsRequest)) { t =>
+      logger.warn("ignore alert: " + imsRequest, t)
+      return false
+    }
+    if (paramContent.isEmpty) {
+      logger.warn("alertParams is empty, will not send alarm")
+      return false
+    }
+
+    val requestConfig = RequestConfig.DEFAULT
+
+    val entity = new StringEntity(
+      paramContent,
+      ContentType.create(ContentType.APPLICATION_JSON.getMimeType, "UTF-8")
+    )
+    entity.setContentEncoding("UTF-8")
+
+    val httpPost = new HttpPost(alertUrl)
+
+    httpPost.setConfig(requestConfig)
+    httpPost.setEntity(entity)
+
+    val response = Utils.tryAndErrorMsg(httpClient.execute(httpPost))("send alert to IMS failed")
+
+    if (response != null) {
+      val responseInfo = EntityUtils.toString(response.getEntity, "UTF-8")
+      logger.info("Alert: " + paramContent + ", Response: " + responseInfo)
+      LogUtils.stdOutLogger.info("Alert: " + paramContent + ", Response: " + responseInfo)
+      if (response.getStatusLine.getStatusCode == 200) return true
+    }
+    false
+  }
+
+  override def shutdown(waitComplete: Boolean = true, timeoutMs: Long = -1): Unit = {
+    super.shutdown(waitComplete, timeoutMs)
+    httpClient.close()
+  }
+
+}
diff --git
a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/PooledImsAlertUtils.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/PooledImsAlertUtils.scala new file mode 100644 index 0000000000..f7917a9e15 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/PooledImsAlertUtils.scala @@ -0,0 +1,110 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.utils.alert.ims + +import org.apache.linkis.common.utils.{Logging, Utils} +import org.apache.linkis.monitor.constants.Constants +import org.apache.linkis.monitor.utils.alert.AlertDesc + +import org.apache.commons.collections.CollectionUtils +import org.apache.commons.lang3.StringUtils +import org.apache.commons.lang3.exception.ExceptionUtils + +import java.net.InetAddress +import java.util +import java.util.HashSet + +import scala.collection.JavaConverters._ + +import ImsAlertWay.ImsAlertWay + +object PooledImsAlertUtils extends Logging { + + private val sender: PooledImsAlertSender = { + val ret = new PooledImsAlertSender(Constants.ALERT_IMS_URL) + ret.start() + ret + } + + private val localIp = InetAddress.getLocalHost.getHostAddress + + def addAlertAndLogException(message: String): Unit = Utils.tryAndError(addAlert(message)) + + def addAlert(message: String): Unit = addExceptionAlert(message, null, null) + + def addExceptionAlert(message: String, t: Throwable): Unit = + addExceptionAlert(message, t, null) + + def addExceptionAlertAndLogException(message: String, t: Throwable): Unit = + Utils.tryAndError(addExceptionAlert(message, t, null)) + + def addExceptionAlert(message: String, t: Throwable, alertWays: util.Set[ImsAlertWay]): Unit = { + val alertObj = + if (StringUtils.isEmpty(message) && t != null) t.getMessage + else if (StringUtils.isEmpty(message)) { + throw new NullPointerException("both message and exception are null!") + } else { + message + } + val _alertWays = + if (CollectionUtils.isNotEmpty(alertWays)) alertWays else new HashSet[ImsAlertWay]() + val (alertInfo, alertLevel) = if (t != null) { + _alertWays.add(ImsAlertWay.Email) + _alertWays.add(ImsAlertWay.WXWork) + _alertWays.add(ImsAlertWay.WeChat) + (ExceptionUtils.getRootCauseMessage(t), ImsAlertLevel.MAJOR) + } else { + _alertWays.add(ImsAlertWay.WXWork) + (message, ImsAlertLevel.WARN) + } + val alertDesc = new ImsAlertDesc( + Constants.ALERT_SUB_SYSTEM_ID, + "BDP Alert", + alertObj, + alertInfo, + alertLevel, + localIp, + 0, + _alertWays + ) + addAlert(alertDesc) + } + + def addAlert(alertDesc: AlertDesc): Unit = { + if (!alertDesc.isInstanceOf[ImsAlertDesc]) { + logger.warn("Ignore wrong alertDesc. 
DataType: " + alertDesc.getClass.getCanonicalName) + } else { + sender.addAlertToPool(alertDesc) + logger.info("successfully added alert") + } + } + + def addAlertAndLogException(alertDesc: ImsAlertDesc): Unit = + Utils.tryAndError(addAlert(alertDesc)) + + def clearAlert(alertDesc: ImsAlertDesc): Unit = { + assert(alertDesc.canRecover == 1) + assert(alertDesc.alertLevel == ImsAlertLevel.CLEAR) + sender.addAlertToPool(alertDesc) + } + + def shutDown(waitComplete: Boolean = true, timeoutMs: Long = -1): Unit = { + sender.shutdown(waitComplete, timeoutMs) + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/UserLabelAlertUtils.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/UserLabelAlertUtils.scala new file mode 100644 index 0000000000..64a587a21c --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/alert/ims/UserLabelAlertUtils.scala @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.utils.alert.ims + +import org.apache.linkis.common.utils.{JsonUtils, Logging, Utils} +import org.apache.linkis.monitor.constants.Constants +import org.apache.linkis.monitor.jobhistory.exception.AnomalyScannerException +import org.apache.linkis.monitor.utils.alert.AlertDesc + +import org.apache.commons.io.IOUtils +import org.apache.commons.lang3.StringUtils +import org.apache.commons.lang3.exception.ExceptionUtils + +import java.io.{BufferedReader, File, FileInputStream, InputStream, InputStreamReader} +import java.text.SimpleDateFormat +import java.util +import java.util.Properties + +import scala.collection.JavaConverters._ + +import com.fasterxml.jackson.databind.{DeserializationFeature, ObjectMapper} +import com.fasterxml.jackson.module.scala.DefaultScalaModule + +object UserLabelAlertUtils extends Logging { + + def getAlerts(prefix: String, userCreator: String): util.Map[String, AlertDesc] = { + val replaceParams: util.HashMap[String, String] = new util.HashMap[String, String] + replaceParams.put("$userCreator", userCreator) + MonitorAlertUtils.getAlerts(prefix, replaceParams) + } + +} diff --git a/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/log/LogUtils.scala b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/log/LogUtils.scala new file mode 100644 index 0000000000..b63a690d24 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/main/scala/org/apache/linkis/monitor/utils/log/LogUtils.scala @@ -0,0 +1,24 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.linkis.monitor.utils.log + +import org.slf4j.LoggerFactory + +object LogUtils { + val stdOutLogger = LoggerFactory.getLogger("PlaintTextConsoleLogger") +} diff --git a/linkis-extensions/linkis-et-monitor/src/test/java/org/apache/linkis/monitor/utils/alert/PooledImsAlertSenderTest.java b/linkis-extensions/linkis-et-monitor/src/test/java/org/apache/linkis/monitor/utils/alert/PooledImsAlertSenderTest.java new file mode 100644 index 0000000000..2b82037377 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/test/java/org/apache/linkis/monitor/utils/alert/PooledImsAlertSenderTest.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.utils.alert; + +import org.apache.linkis.monitor.utils.alert.ims.ImsAlertDesc; +import org.apache.linkis.monitor.utils.alert.ims.ImsAlertLevel; +import org.apache.linkis.monitor.utils.alert.ims.ImsAlertWay; +import org.apache.linkis.monitor.utils.alert.ims.PooledImsAlertSender; +import org.apache.linkis.server.utils.LinkisMainHelper; + +import java.util.HashSet; +import java.util.Set; + +public class PooledImsAlertSenderTest { + // @Before + public void before() { + System.getProperties().setProperty(LinkisMainHelper.SERVER_NAME_KEY(), "linkis-et-monitor"); + System.getProperties() + .setProperty("log4j.configurationFile", "src/test/resources/log4j2-console.xml"); + // System.getProperties().setProperty("wds.linkis.server.conf", + // "linkis-et-monitor.properties"); + } + + // @org.junit.Test + public void doSendAlert() throws Exception { + Set ways = new HashSet<>(); + ways.add(ImsAlertWay.WeChat()); + ways.add(ImsAlertWay.Email()); + + Set receivers = new HashSet<>(); + receivers.add("shangda, johnnwang"); + ImsAlertDesc desc = + new ImsAlertDesc( + "5435", + "linkis_alert_test", + "linkis_alert", + "this is a test for linkis", + ImsAlertLevel.MINOR(), + "10.127.0.0.1", + 0, + ways, + receivers, + 3, + 12); + + System.out.println(desc); + String url = "http://172.21.0.130:10812/ims_data_access/send_alarm_by_json.do"; + + PooledImsAlertSender sender = new PooledImsAlertSender(url); + sender.doSendAlert(desc); + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/test/java/org/apache/linkis/monitor/utils/alert/PooledImsAlertUtilsTest.java b/linkis-extensions/linkis-et-monitor/src/test/java/org/apache/linkis/monitor/utils/alert/PooledImsAlertUtilsTest.java new file mode 100644 index 0000000000..c0798168aa --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/test/java/org/apache/linkis/monitor/utils/alert/PooledImsAlertUtilsTest.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.linkis.monitor.utils.alert; + +import org.apache.linkis.monitor.constants.Constants; +import org.apache.linkis.monitor.utils.alert.ims.MonitorAlertUtils; +import org.apache.linkis.monitor.utils.alert.ims.PooledImsAlertUtils; +import org.apache.linkis.server.utils.LinkisMainHelper; + +import java.util.Map; + +public class PooledImsAlertUtilsTest { + // @Before + public void before() { + System.getProperties().setProperty(LinkisMainHelper.SERVER_NAME_KEY(), "linkis-et-monitor"); + System.getProperties() + .setProperty("log4j.configurationFile", "src/test/resources/log4j2-console.xml"); + // System.getProperties().setProperty("wds.linkis.server.conf", + // "linkis-et-monitor.properties"); + } + + // @Test + public void addAlert() throws Exception { + PooledImsAlertUtils.addAlert("1st test"); + Map alerts = + MonitorAlertUtils.getAlerts((Constants.SCAN_PREFIX_ERRORCODE()), null); + for (Map.Entry kv : alerts.entrySet()) { + System.out.println(kv.getKey() + ": " + kv.getValue().toString()); + PooledImsAlertUtils.addAlert(kv.getValue()); + } + Thread.sleep(2000l); + PooledImsAlertUtils.shutDown(true, -1); + } +} diff --git a/linkis-extensions/linkis-et-monitor/src/test/resources/log4j2-console.xml b/linkis-extensions/linkis-et-monitor/src/test/resources/log4j2-console.xml new file mode 100644 index 0000000000..49eabc51e6 --- /dev/null +++ b/linkis-extensions/linkis-et-monitor/src/test/resources/log4j2-console.xml @@ -0,0 +1,46 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/linkis-extensions/pom.xml b/linkis-extensions/pom.xml index 661b6be0bf..7233141565 100644 --- a/linkis-extensions/pom.xml +++ b/linkis-extensions/pom.xml @@ -28,6 +28,7 @@ pom linkis-io-file-client + linkis-et-monitor
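
A minimal usage sketch of the new alert utilities, mirroring the disabled unit tests above. Illustrative only and not part of the patch: the "$alteruser" value and the sleep duration are assumptions; the alert templates themselves come from the IMS alert properties file resolved through Constants.ALERT_PROPS_FILE_PATH.

import java.util

import org.apache.linkis.monitor.constants.Constants
import org.apache.linkis.monitor.utils.alert.ims.{MonitorAlertUtils, PooledImsAlertUtils}

object ImsAlertSketch {

  def main(args: Array[String]): Unit = {
    // 1. plain-text alert: wrapped into an ImsAlertDesc and queued on the pooled sender
    PooledImsAlertUtils.addAlert("linkis-et-monitor self test")

    // 2. template-driven alerts: properties whose keys start with the given prefix are
    //    turned into ImsAlertDesc instances, with placeholders replaced from params
    val params = new util.HashMap[String, String]()
    params.put("$alteruser", "hadoop") // assumed receiver, see MonitorAlertUtils.getAlerts
    val alerts = MonitorAlertUtils.getAlerts(Constants.SCAN_PREFIX_ERRORCODE, params)
    alerts.values().forEach(desc => PooledImsAlertUtils.addAlert(desc))

    // give the pooled sender time to drain its queue before shutting down
    Thread.sleep(2000L)
    PooledImsAlertUtils.shutDown(true, -1)
  }

}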