Merge pull request #908 from WeBankFinTech/dev-1.0.1
[release] Prepare for release Linkis-1.0.1
diff --git a/assembly-combined-package/assembly-combined/conf/log4j2.xml b/assembly-combined-package/assembly-combined/conf/log4j2.xml
index 7e28259..b86b292 100644
--- a/assembly-combined-package/assembly-combined/conf/log4j2.xml
+++ b/assembly-combined-package/assembly-combined/conf/log4j2.xml
@@ -28,6 +28,16 @@
<root level="INFO">
<appender-ref ref="RollingFile"/>
</root>
+ <logger name="com.webank.wedatasphere.linkis.message.builder.DefaultMessageJob" level="warn" additivity="true">
+ <appender-ref ref="RollingFile"/>
+ </logger>
+
+ <logger name="com.webank.wedatasphere.linkis.message.scheduler.DefaultMessageExecutor" level="warn" additivity="true">
+ <appender-ref ref="RollingFile"/>
+ </logger>
+ <logger name="com.netflix.loadbalancer.DynamicServerListLoadBalancer" level="warn" additivity="true">
+ <appender-ref ref="RollingFile"/>
+ </logger>
</loggers>
</configuration>
diff --git a/assembly-combined-package/assembly-combined/pom.xml b/assembly-combined-package/assembly-combined/pom.xml
index b0e7d97..c1f0694 100644
--- a/assembly-combined-package/assembly-combined/pom.xml
+++ b/assembly-combined-package/assembly-combined/pom.xml
@@ -18,7 +18,7 @@
<parent>
<groupId>com.webank.wedatasphere.linkis</groupId>
<artifactId>linkis</artifactId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/assembly-combined-package/assembly-combined/public-module-combined/pom.xml b/assembly-combined-package/assembly-combined/public-module-combined/pom.xml
index 85ac93b..e77dcab 100644
--- a/assembly-combined-package/assembly-combined/public-module-combined/pom.xml
+++ b/assembly-combined-package/assembly-combined/public-module-combined/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/assembly-combined-package/bin/install-io.sh b/assembly-combined-package/bin/install-io.sh
deleted file mode 100644
index b971cd3..0000000
--- a/assembly-combined-package/bin/install-io.sh
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/bin/sh
-#
-# Copyright 2019 WeBank
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#Actively load user env
-
-workDir=$1
-if [ "$workDir" == "" ];then
- echo "workDir is empty using shell dir"
- shellDir=`dirname $0`
- workDir=`cd ${shellDir}/..;pwd`
-fi
-echo "workDir is $workDir"
-
-source ${workDir}/bin/common.sh
-## copy io-entrance lib
-export LINKIS_CONFIG_PATH=${LINKIS_CONFIG_PATH:-"${workDir}/conf/config.sh"}
-source ${LINKIS_CONFIG_PATH}
-
-rm -rf $workDir/share/linkis/ujes/io/linkis-ujes-io-entrance-bak;mv -f $workDir/share/linkis/ujes/io/linkis-ujes-io-entrance $workDir/share/linkis/ujes/io/linkis-ujes-io-entrance-bak
-cd $workDir/share/linkis/ujes/io/;unzip -o $workDir/share/linkis/ujes/io/linkis-ujes-io-entrance.zip > /dev/null
-isSuccess "unzip linkis-ujes-io-entrance.zip"
-cd -
-
-function copyEntrance(){
- copyFile "$SERVER_IP" "${workDir}/share/linkis/ujes/io/linkis-ujes-io-entrance/lib/linkis-io-entrance-${LINKIS_VERSION}.jar" "$SERVER_HOME/$SERVER_NAME/lib/"
- copyFile "$SERVER_IP" "${workDir}/share/linkis/ujes/io/linkis-ujes-io-entrance/lib/linkis-entrance-client-${LINKIS_VERSION}.jar" "$SERVER_HOME/$SERVER_NAME/lib/"
- isSuccess "copy io-entrance to $SERVER_NAME "
-}
-
-SERVER_HOME=$LINKIS_INSTALL_HOME
-
-
-SERVER_NAME=linkis-publicservice
-SERVER_IP=$PUBLICSERVICE_INSTALL_IP
-copyEntrance
-
-SERVER_NAME=linkis-ujes-python-entrance
-SERVER_IP=$PYTHON_INSTALL_IP
-copyEntrance
-
-SERVER_NAME=linkis-ujes-hive-entrance
-SERVER_IP=$HIVE_INSTALL_IP
-copyEntrance
-
-SERVER_NAME=linkis-ujes-spark-entrance
-SERVER_IP=$SPARK_INSTALL_IP
-copyEntrance
-
-
-SERVER_NAME=linkis-ujes-shell-entrance
-SERVER_IP=$SHELL_INSTALL_IP
-copyEntrance
-
-SERVER_NAME=linkis-ujes-jdbc-entrance
-SERVER_IP=$JDBC_INSTALL_IP
-copyEntrance
\ No newline at end of file
diff --git a/assembly-combined-package/config/linkis-env.sh b/assembly-combined-package/config/linkis-env.sh
index 27d8340..d3f4042 100644
--- a/assembly-combined-package/config/linkis-env.sh
+++ b/assembly-combined-package/config/linkis-env.sh
@@ -48,9 +48,10 @@
#RESULT_SET_ROOT_PATH=hdfs:///tmp/linkis ##hdfs:// required
### Provide the DB information of Hive metadata database.
-HIVE_META_URL=
-HIVE_META_USER=
-HIVE_META_PASSWORD=
+### Attention! If there are special characters like "&", they need to be enclosed in quotation marks.
+HIVE_META_URL=""
+HIVE_META_USER=""
+HIVE_META_PASSWORD=""
##YARN REST URL spark engine required
YARN_RESTFUL_URL=http://127.0.0.1:8088
@@ -129,7 +130,7 @@
##The decompression directory and the installation directory need to be inconsistent
LINKIS_HOME=/appcom/Install/LinkisInstall
-LINKIS_VERSION=1.0.0
+LINKIS_VERSION=1.0.1
# for install
LINKIS_PUBLIC_MODULE=lib/linkis-commons/public-module
\ No newline at end of file
diff --git a/assembly-combined-package/pom.xml b/assembly-combined-package/pom.xml
index bcba619..a205d7b 100644
--- a/assembly-combined-package/pom.xml
+++ b/assembly-combined-package/pom.xml
@@ -18,7 +18,7 @@
<parent>
<groupId>com.webank.wedatasphere.linkis</groupId>
<artifactId>linkis</artifactId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/bin/checkEnv.sh b/bin/checkEnv.sh
deleted file mode 100644
index d51bd5c..0000000
--- a/bin/checkEnv.sh
+++ /dev/null
@@ -1,45 +0,0 @@
-#!/bin/sh
-#
-# Copyright 2019 WeBank
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-say() {
- printf 'check command fail \n %s\n' "$1"
-}
-
-err() {
- say "$1" >&2
- exit 1
-}
-
-check_cmd() {
- command -v "$1" > /dev/null 2>&1
-}
-
-need_cmd() {
- if ! check_cmd "$1"; then
- err "need '$1' (your linux command not found)"
- fi
-}
-echo "<-----start to check used cmd---->"
-need_cmd yum
-need_cmd java
-need_cmd mysql
-need_cmd unzip
-need_cmd expect
-need_cmd telnet
-need_cmd tar
-need_cmd sed
-need_cmd dos2unix
-echo "<-----end to check used cmd---->"
diff --git a/bin/checkServices.sh b/bin/checkServices.sh
deleted file mode 100644
index 561277b..0000000
--- a/bin/checkServices.sh
+++ /dev/null
@@ -1,79 +0,0 @@
-#
-# Copyright 2019 WeBank
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-#!/bin/sh
-source ~/.bash_profile
-
-shellDir=`dirname $0`
-workDir=`cd ${shellDir}/..;pwd`
-
-##load config
-#source ${workDir}/conf/config.sh
-export LINKIS_DSS_CONF_FILE=${LINKIS_DSS_CONF_FILE:-"${workDir}/conf/config.sh"}
-export DISTRIBUTION=${DISTRIBUTION:-"${workDir}/conf/config.sh"}
-source ${LINKIS_DSS_CONF_FILE}
-source ${DISTRIBUTION}
-MICRO_SERVICE_NAME=$1
-MICRO_SERVICE_IP=$2
-MICRO_SERVICE_PORT=$3
-
-local_host="`hostname --fqdn`"
-
-ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}')
-
-function isLocal(){
- if [ "$1" == "127.0.0.1" ];then
- return 0
- elif [ $1 == "localhost" ]; then
- return 0
- elif [ $1 == $local_host ]; then
- return 0
- elif [ $1 == $ipaddr ]; then
- return 0
- fi
- return 1
-}
-
-function executeCMD(){
- isLocal $1
- flag=$?
- echo "Is local "$flag
- if [ $flag == "0" ];then
- eval $2
- else
- ssh -p $SSH_PORT $1 $2
- fi
-
-}
-
-echo "Start to Check if your microservice:$MICRO_SERVICE_NAME is normal via telnet"
-echo "--------------------------------------------------------------------------------------------------------------------------"
-echo $MICRO_SERVICE_NAME
-echo $MICRO_SERVICE_IP
-echo $MICRO_SERVICE_PORT
-echo "--------------------------------------------------------------------------------------------------------------------------"
-result=`echo -e "\n" | telnet $MICRO_SERVICE_IP $MICRO_SERVICE_PORT 2>/dev/null | grep Connected | wc -l`
-if [ $result -eq 1 ]; then
- echo "$MICRO_SERVICE_NAME is ok."
-else
- echo "ERROR your $MICRO_SERVICE_NAME microservice is not start successful !!! ERROR logs as follows :"
- echo "PLEAESE CHECK DETAIL LOG,LOCATION:$LINKIS_INSTALL_HOME/$MICRO_SERVICE_NAME/logs/linkis.out"
- echo '<---------------------------------------------------->'
- executeCMD $MICRO_SERVICE_IP "tail -n 50 $LINKIS_INSTALL_HOME/$MICRO_SERVICE_NAME/logs/*.out"
- echo '<---------------------------------------------------->'
- echo "PLEAESE CHECK DETAIL LOG,LOCATION:$LINKIS_INSTALL_HOME/$MICRO_SERVICE_NAME/logs/linkis.out"
- exit 1
-fi
-
diff --git a/bin/common.sh b/bin/common.sh
deleted file mode 100644
index 49d5cf7..0000000
--- a/bin/common.sh
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/bin/sh
-#
-# Copyright 2019 WeBank
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#Actively load user env
-source ~/.bash_profile
-
-local_host="`hostname --fqdn`"
-
-ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}')
-
-function isLocal(){
- if [ "$1" == "127.0.0.1" ];then
- return 0
- elif [ "$1" == "" ]; then
- return 0
- elif [ "$1" == "localhost" ]; then
- return 0
- elif [ "$1" == $local_host ]; then
- return 0
- elif [ "$1" == $ipaddr ]; then
- return 0
- fi
- return 1
-}
-
-function executeCMD(){
- isLocal $1
- flag=$?
- if [ $flag == "0" ];then
- echo "Is local execution:$2"
- eval $2
- else
- echo "Is remote execution:$2"
- ssh -p $SSH_PORT $1 $2
- fi
-
-}
-function copyFile(){
- isLocal $1
- flag=$?
- src=$2
- dest=$3
- if [ $flag == "0" ];then
- echo "Is local cp "
- cp -r "$src" "$dest"
- else
- echo "Is remote cp "
- scp -r -P $SSH_PORT "$src" $1:"$dest"
- fi
-
-}
-
-function isSuccess(){
-if [ $? -ne 0 ]; then
- echo "Failed to " + $1
- exit 1
-else
- echo "Succeed to" + $1
-fi
-}
\ No newline at end of file
diff --git a/bin/install.sh b/bin/install.sh
deleted file mode 100644
index be8cfee..0000000
--- a/bin/install.sh
+++ /dev/null
@@ -1,356 +0,0 @@
-#!/bin/sh
-#
-# Copyright 2019 WeBank
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#Actively load user env
-source ~/.bash_profile
-shellDir=`dirname $0`
-workDir=`cd ${shellDir}/..;pwd`
-
-#To be compatible with MacOS and Linux
-txt=""
-if [[ "$OSTYPE" == "darwin"* ]]; then
- txt="''"
-elif [[ "$OSTYPE" == "linux-gnu" ]]; then
- # linux
- txt=""
-elif [[ "$OSTYPE" == "cygwin" ]]; then
- echo "linkis not support Windows operating system"
- exit 1
-elif [[ "$OSTYPE" == "msys" ]]; then
- echo "linkis not support Windows operating system"
- exit 1
-elif [[ "$OSTYPE" == "win32" ]]; then
- echo "linkis not support Windows operating system"
- exit 1
-elif [[ "$OSTYPE" == "freebsd"* ]]; then
-
- txt=""
-else
- echo "Operating system unknown, please tell us(submit issue) for better service"
- exit 1
-fi
-
-function isSuccess(){
-if [ $? -ne 0 ]; then
- echo "Failed to " + $1
- exit 1
-else
- echo "Succeed to" + $1
-fi
-}
-
-function checkPythonAndJava(){
- python --version
- isSuccess "execute python --version"
- java -version
- isSuccess "execute java --version"
-}
-
-function checkHadoopAndHive(){
- hadoopVersion="`hdfs version`"
- defaultHadoopVersion="2.7"
- checkversion "$hadoopVersion" $defaultHadoopVersion hadoop
- checkversion "$(whereis hive)" "1.2" hive
-}
-
-function checkversion(){
-versionStr=$1
-defaultVersion=$2
-module=$3
-
-result=$(echo $versionStr | grep "$defaultVersion")
-if [ -n "$result" ]; then
- echo "$module version match"
-else
- echo "WARN: Your $module version is not $defaultVersion, there may be compatibility issues:"
- echo " 1: Continue installation, there may be compatibility issues"
- echo " 2: Exit installation"
- echo ""
- read -p "Please input the choice:" idx
- if [[ '2' = "$idx" ]];then
- echo "You chose Exit installation"
- exit 1
- fi
-fi
-}
-
-function checkSpark(){
- spark-submit --version
- isSuccess "execute spark-submit --version"
-}
-
-say() {
- printf 'check command fail \n %s\n' "$1"
-}
-
-err() {
- say "$1" >&2
- exit 1
-}
-
-check_cmd() {
- command -v "$1" > /dev/null 2>&1
-}
-
-need_cmd() {
- if ! check_cmd "$1"; then
- err "need '$1' (command not found)"
- fi
-}
-
-
-
-sh ${workDir}/bin/checkEnv.sh
-isSuccess "check env"
-
-##load config
-echo "step1:load config "
-export LINKIS_CONFIG_PATH=${LINKIS_CONFIG_PATH:-"${workDir}/conf/linkis-env.sh"}
-export LINKIS_DB_CONFIG_PATH=${LINKIS_DB_CONFIG_PATH:-"${workDir}/conf/db.sh"}
-source ${LINKIS_CONFIG_PATH}
-source ${LINKIS_DB_CONFIG_PATH}
-isSuccess "load config"
-local_host="`hostname --fqdn`"
-
-ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}')
-
-function isLocal(){
- if [ "$1" == "127.0.0.1" ];then
- return 0
- elif [ $1 == "localhost" ]; then
- return 0
- elif [ $1 == $local_host ]; then
- return 0
- elif [ $1 == $ipaddr ]; then
- return 0
- fi
- return 1
-}
-
-function executeCMD(){
- isLocal $1
- flag=$?
- if [ $flag == "0" ];then
- echo "Is local execution:$2"
- eval $2
- else
- echo "Is remote execution:$2"
- ssh -p $SSH_PORT $1 $2
- fi
-
-}
-function copyFile(){
- isLocal $1
- flag=$?
- src=$2
- dest=$3
- if [ $flag == "0" ];then
- echo "Is local cp "
- eval "cp -r $src $dest"
- else
- echo "Is remote cp "
- scp -r -P $SSH_PORT $src $1:$dest
- fi
-
-}
-
-
-##env check
-echo "Do you want to clear Linkis table information in the database?"
-echo " 1: Do not execute table-building statements"
-echo " 2: Dangerous! Clear all data and rebuild the tables"
-echo " other: exit"
-echo ""
-
-MYSQL_INSTALL_MODE=1
-
-#使用read参数[-p]后,允许在[-p]后面跟一字符串,在字符串后面跟n个shell变量。n个shell变量用来接收从shell界面输入的字符串
-read -p "Please input the choice:" idx
-if [[ '2' = "$idx" ]];then
- MYSQL_INSTALL_MODE=2
- echo "You chose Rebuild the table"
-elif [[ '1' = "$idx" ]];then
- MYSQL_INSTALL_MODE=1
- echo "You chose not execute table-building statements"
-else
- echo "no choice,exit!"
- exit 1
-fi
-
-echo "create hdfs directory and local directory"
-if [ "$WORKSPACE_USER_ROOT_PATH" != "" ]
-then
- localRootDir=$WORKSPACE_USER_ROOT_PATH
- if [[ $WORKSPACE_USER_ROOT_PATH == file://* ]];then
- localRootDir=${WORKSPACE_USER_ROOT_PATH#file://}
- mkdir -p $localRootDir/$deployUser
- sudo chmod -R 775 $localRootDir/$deployUser
- elif [[ $WORKSPACE_USER_ROOT_PATH == hdfs://* ]];then
- localRootDir=${WORKSPACE_USER_ROOT_PATH#hdfs://}
- hdfs dfs -mkdir -p $localRootDir/$deployUser
- hdfs dfs -chmod -R 775 $localRootDir/$deployUser
- else
- echo "does not support $WORKSPACE_USER_ROOT_PATH filesystem types"
- fi
-fi
-isSuccess "create $WORKSPACE_USER_ROOT_PATH directory"
-
-
-######################## init hdfs and db ################################
- if [ "$HDFS_USER_ROOT_PATH" != "" ]
- then
- localRootDir=$HDFS_USER_ROOT_PATH
- if [[ $HDFS_USER_ROOT_PATH == file://* ]];then
- localRootDir=${HDFS_USER_ROOT_PATH#file://}
- mkdir -p $localRootDir/$deployUser
- sudo chmod -R 775 $localRootDir/$deployUser
- elif [[ $HDFS_USER_ROOT_PATH == hdfs://* ]];then
- localRootDir=${HDFS_USER_ROOT_PATH#hdfs://}
- hdfs dfs -mkdir -p $localRootDir/$deployUser
- hdfs dfs -chmod -R 775 $localRootDir/$deployUser
- else
- echo "does not support $HDFS_USER_ROOT_PATH filesystem types"
- fi
- fi
- isSuccess "create $HDFS_USER_ROOT_PATH directory"
-
-
- if [ "$RESULT_SET_ROOT_PATH" != "" ]
- then
- localRootDir=$RESULT_SET_ROOT_PATH
- if [[ $RESULT_SET_ROOT_PATH == file://* ]];then
- localRootDir=${RESULT_SET_ROOT_PATH#file://}
- mkdir -p $localRootDir/$deployUser
- sudo chmod -R 775 $localRootDir/$deployUser
- elif [[ $RESULT_SET_ROOT_PATH == hdfs://* ]];then
- localRootDir=${RESULT_SET_ROOT_PATH#hdfs://}
- hdfs dfs -mkdir -p $localRootDir/$deployUser
- hdfs dfs -chmod -R 775 $localRootDir/$deployUser
- else
- echo "does not support $RESULT_SET_ROOT_PATH filesystem types"
- fi
- fi
- isSuccess "create $RESULT_SET_ROOT_PATH directory"
-
-## sql init
-if [ "$YARN_RESTFUL_URL" != "" ]
-then
- sed -i ${txt} "s#@YARN_RESTFUL_URL#$YARN_RESTFUL_URL#g" $workDir/db/linkis_dml.sql
-fi
-
-common_conf=$workDir/conf/linkis.properties
-SERVER_IP=$local_host
-
-##Label set start
-if [ "$SPARK_VERSION" != "" ]
-then
- sed -i ${txt} "s#spark-2.4.3#spark-$SPARK_VERSION#g" $workDir/db/linkis_dml.sql
- executeCMD $SERVER_IP "sed -i ${txt} \"s#\#wds.linkis.spark.engine.version.*#wds.linkis.spark.engine.version=$SPARK_VERSION#g\" $common_conf"
-fi
-
-if [ "$HIVE_VERSION" != "" ]
-then
- sed -i ${txt} "s#hive-1.2.1#hive-$HIVE_VERSION#g" $workDir/db/linkis_dml.sql
- executeCMD $SERVER_IP "sed -i ${txt} \"s#\#wds.linkis.hive.engine.version.*#wds.linkis.hive.engine.version=$HIVE_VERSION#g\" $common_conf"
-fi
-
-if [ "$PYTHON_VERSION" != "" ]
-then
- sed -i ${txt} "s#python-python2#python-$PYTHON_VERSION#g" $workDir/db/linkis_dml.sql
- executeCMD $SERVER_IP "sed -i ${txt} \"s#\#wds.linkis.python.engine.version.*#wds.linkis.python.engine.version=$PYTHON_VERSION#g\" $common_conf"
-fi
-
-##Label set end
-
-
-#init db
-if [[ '2' = "$MYSQL_INSTALL_MODE" ]];then
- mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD --default-character-set=utf8 -e "CREATE DATABASE IF NOT EXISTS $MYSQL_DB DEFAULT CHARSET utf8 COLLATE utf8_general_ci;"
- mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source ${workDir}/db/linkis_ddl.sql"
- isSuccess "source linkis_ddl.sql"
- mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 -e "source ${workDir}/db/linkis_dml.sql"
- isSuccess "source linkis_dml.sql"
- echo "Rebuild the table"
-fi
-###########################################################################
-#Deal special symbol '#'
-HIVE_META_PASSWORD=$(echo ${HIVE_META_PASSWORD//'#'/'\#'})
-MYSQL_PASSWORD=$(echo ${MYSQL_PASSWORD//'#'/'\#'})
-
-#Deal common config
-echo "Update common config..."
-
-if test -z "$GATEWAY_INSTALL_IP"
-then
- export GATEWAY_INSTALL_IP="`hostname --fqdn`"
-fi
-
-executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.version.*#wds.linkis.server.version=$LINKIS_SERVER_VERSION#g\" $common_conf"
-executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.gateway.url.*#wds.linkis.gateway.url=http://$GATEWAY_INSTALL_IP:$GATEWAY_PORT#g\" $common_conf"
-executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.eureka.defaultZone.*#wds.linkis.eureka.defaultZone=$EUREKA_URL#g\" $common_conf"
-executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.mybatis.datasource.url.*#wds.linkis.server.mybatis.datasource.url=jdbc:mysql://${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DB}?characterEncoding=UTF-8#g\" $common_conf"
-executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.mybatis.datasource.username.*#wds.linkis.server.mybatis.datasource.username=$MYSQL_USER#g\" $common_conf"
-executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.server.mybatis.datasource.password.*#wds.linkis.server.mybatis.datasource.password=$MYSQL_PASSWORD#g\" $common_conf"
-
-executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.ldap.proxy.url.*#wds.linkis.ldap.proxy.url=$LDAP_URL#g\" $common_conf"
-executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.ldap.proxy.baseDN.*#wds.linkis.ldap.proxy.baseDN=$LDAP_BASEDN#g\" $common_conf"
-executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.ldap.proxy.userNameFormat.*#wds.linkis.ldap.proxy.userNameFormat=$LDAP_USER_NAME_FORMAT#g\" $common_conf"
-executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.admin.user.*#wds.linkis.gateway.admin.user=$deployUser#g\" $common_conf"
-# hadoop config
-executeCMD $SERVER_IP "sed -i ${txt} \"s#\#hadoop.config.dir.*#hadoop.config.dir=$HADOOP_CONF_DIR#g\" $common_conf"
-#hive config
-executeCMD $SERVER_IP "sed -i ${txt} \"s#\#hive.config.dir.*#hive.config.dir=$HIVE_CONF_DIR#g\" $common_conf"
-
-#spark config
-executeCMD $SERVER_IP "sed -i ${txt} \"s#spark.config.dir.*#spark.config.dir=$SPARK_CONF_DIR#g\" $common_conf"
-
-if [ "$HIVE_META_URL" != "" ]
-then
- executeCMD $SERVER_IP "sed -i ${txt} \"s#hive.meta.url.*#hive.meta.url=$HIVE_META_URL#g\" $common_conf"
-fi
-if [ "$HIVE_META_USER" != "" ]
-then
- executeCMD $SERVER_IP "sed -i ${txt} \"s#hive.meta.user.*#hive.meta.user=$HIVE_META_USER#g\" $common_conf"
-fi
-if [ "$HIVE_META_PASSWORD" != "" ]
-then
- HIVE_META_PASSWORD=$(echo ${HIVE_META_PASSWORD//'#'/'\#'})
- executeCMD $SERVER_IP "sed -i ${txt} \"s#hive.meta.password.*#hive.meta.password=$HIVE_META_PASSWORD#g\" $common_conf"
-fi
-
-
-executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.filesystem.root.path.*#wds.linkis.filesystem.root.path=$WORKSPACE_USER_ROOT_PATH#g\" $common_conf"
-executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.filesystem.hdfs.root.path.*#wds.linkis.filesystem.hdfs.root.path=$HDFS_USER_ROOT_PATH#g\" $common_conf"
-
-# engineconn
-if test -z $ENGINECONN_ROOT_PATH
-then
- ENGINECONN_ROOT_PATH=$workDir/engineroot
-fi
-executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.engineconn.root.dir.*#wds.linkis.engineconn.root.dir=$ENGINECONN_ROOT_PATH#g\" $common_conf"
-executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.engineconn.home.*#wds.linkis.engineconn.home=${workDir}/lib/linkis-engineconn-plugins#g\" $common_conf"
-executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.engineconn.plugin.loader.store.path.*#wds.linkis.engineconn.plugin.loader.store.path=${workDir}/lib/linkis-engineconn-plugins#g\" $common_conf"
-
-# common lib
-executeCMD $SERVER_IP "sed -i ${txt} \"s#wds.linkis.public_module.path.*#wds.linkis.public_module.path=${workDir}/lib/linkis-commons/public-module#g\" $common_conf"
-
-echo "Congratulations! You have installed Linkis $LINKIS_VERSION successfully, please use sbin/start-all.sh to start it!"
-
-
-
-
-
diff --git a/conf/db.sh b/conf/db.sh
deleted file mode 100644
index f19e83d..0000000
--- a/conf/db.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-### Used to store user's custom variables, user's configuration, UDFs and functions, while providing the JobHistory service
-MYSQL_HOST=
-MYSQL_PORT=
-MYSQL_DB=
-MYSQL_USER=
-MYSQL_PASSWORD=
\ No newline at end of file
diff --git a/conf/linkis-env.sh b/conf/linkis-env.sh
deleted file mode 100644
index 59319ac..0000000
--- a/conf/linkis-env.sh
+++ /dev/null
@@ -1,135 +0,0 @@
-#!/bin/bash
-#
-# description: Starts and stops Server
-#
-# @name: linkis-demo
-
-# @created: 01.16.2021
-#
-# Modified for Linkis 1.0.0
-
-# SSH_PORT=22
-
-### deploy user
-deployUser=hadoop
-
-##Linkis_SERVER_VERSION
-LINKIS_SERVER_VERSION=v1
-
-### Specifies the user workspace, which is used to store the user's script files and log files.
-### Generally local directory
-WORKSPACE_USER_ROOT_PATH=file:///tmp/linkis/ ##file:// required
-### User's root hdfs path
-HDFS_USER_ROOT_PATH=hdfs:///tmp/linkis ##hdfs:// required
-
-### Path to store job ResultSet:file or hdfs path
-RESULT_SET_ROOT_PATH=hdfs:///tmp/linkis ##hdfs:// required
-
-### Path to store started engines and engine logs, must be local
-ENGINECONN_ROOT_PATH=/appcom/tmp ## file:// required
-
-ENTRANCE_CONFIG_LOG_PATH=hdfs:///tmp/linkis/ ##file:// required
-
-### Provide the DB information of Hive metadata database.
-HIVE_META_URL=
-HIVE_META_USER=
-HIVE_META_PASSWORD=
-
-##YARN REST URL spark engine required
-YARN_RESTFUL_URL=http://127.0.0.1:8088
-
-###HADOOP CONF DIR
-HADOOP_CONF_DIR=/appcom/config/hadoop-config
-
-###HIVE CONF DIR
-HIVE_CONF_DIR=/appcom/config/hive-config
-
-###SPARK CONF DIR
-SPARK_CONF_DIR=/appcom/config/spark-config
-
-## Engine version conf
-#SPARK_VERSION
-#SPARK_VERSION=2.4.3
-##HIVE_VERSION
-#HIVE_VERSION=1.2.1
-#PYTHON_VERSION=python2
-
-################### The install Configuration of all Micro-Services #####################
-#
-# NOTICE:
-# 1. If you just wanna try, the following micro-service configuration can be set without any settings.
-# These services will be installed by default on this machine.
-# 2. In order to get the most complete enterprise-level features, we strongly recommend that you install
-# Linkis in a distributed manner and set the following microservice parameters
-#
-
-### EUREKA install information
-### You can access it in your browser at the address below:http://${EUREKA_INSTALL_IP}:${EUREKA_PORT}
-#EUREKA_INSTALL_IP=127.0.0.1 # Microservices Service Registration Discovery Center
-EUREKA_PORT=20303
-EUREKA_PREFER_IP=false
-
-### Gateway install information
-#GATEWAY_INSTALL_IP=127.0.0.1
-GATEWAY_PORT=9001
-
-### ApplicationManager
-#MANAGER_INSTALL_IP=127.0.0.1
-MANAGER_PORT=9101
-
-### EngineManager
-#ENGINECONNMANAGER_INSTALL_IP=127.0.0.1
-ENGINECONNMANAGER_PORT=9102
-
-
-
-### EnginePluginServer
-#ENGINECONN_PLUGIN_SERVER_INSTALL_IP=127.0.0.1
-ENGINECONN_PLUGIN_SERVER_PORT=9103
-
-### LinkisEntrance
-#ENTRANCE_INSTALL_IP=127.0.0.1
-ENTRANCE_PORT=9104
-
-### publicservice
-#PUBLICSERVICE_INSTALL_IP=127.0.0.1
-PUBLICSERVICE_PORT=9105
-
-
-### Hive Metadata Query service, provide the metadata information of Hive databases.
-#DATASOURCE_INSTALL_IP=127.0.0.1
-DATASOURCE_PORT=9106
-
-### BML
-### This service is used to provide BML capability.
-#BML_INSTALL_IP=127.0.0.1
-BML_PORT=9107
-
-### cs
-#CS_INSTALL_IP=127.0.0.1
-CS_PORT=9108
-
-########################################################################################
-
-## LDAP is for enterprise authorization, if you just want to have a try, ignore it.
-#LDAP_URL=ldap://localhost:1389/
-#LDAP_BASEDN=dc=webank,dc=com
-#LDAP_USER_NAME_FORMAT=cn=%s@xxx.com,OU=xxx,DC=xxx,DC=com
-
-## java application default jvm memory
-export SERVER_HEAP_SIZE="512M"
-
-if test -z "$EUREKA_INSTALL_IP"
-then
- export EUREKA_INSTALL_IP="`hostname --fqdn`"
-fi
-if [ "true" != "$EUREKA_PREFER_IP" ]
-then
- export EUREKA_HOSTNAME=$EUREKA_INSTALL_IP
-fi
-export EUREKA_URL=http://$EUREKA_INSTALL_IP:$EUREKA_PORT/eureka/
-
-LINKIS_VERSION=1.0.0
-
-# for install
-LINKIS_PUBLIC_MODULE=lib/linkis-commons/public-module
\ No newline at end of file
diff --git a/conf/linkis.properties b/conf/linkis.properties
deleted file mode 100644
index 5654baf..0000000
--- a/conf/linkis.properties
+++ /dev/null
@@ -1,51 +0,0 @@
-#
-# Copyright 2019 WeBank
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-##
-#wds.linkis.test.mode=true
-wds.linkis.server.version=v1
-##spring conf
-wds.linkis.gateway.url=http://127.0.0.1:9001
-wds.linkis.eureka.defaultZone=http://127.0.0.1:20303/eureka/
-##mybatis
-wds.linkis.server.mybatis.datasource.url=
-wds.linkis.server.mybatis.datasource.username=
-wds.linkis.server.mybatis.datasource.password=
-##hive meta
-hive.meta.url=
-hive.meta.user=
-hive.meta.password=
-##LDAP
-wds.linkis.ldap.proxy.url=
-wds.linkis.ldap.proxy.baseDN=
-wds.linkis.ldap.proxy.userNameFormat=
-
-wds.linkis.admin.user=hadoop
-#hadoopconfig
-#hadoop.config.dir=/appcom/config/hadoop-config
-#hive.config.dir=
-#spark.config.dir
-##fileSystem
-wds.linkis.filesystem.root.path=file:///tmp/linkis/
-wds.linkis.filesystem.hdfs.root.path=hdfs:///tmp/linkis/
-#engine plugin
-wds.linkis.engineconn.root.dir=/appcom/tmp
-wds.linkis.engineconn.home=/appcom/Install/LinkisInstall/lib/linkis-engineconn-plugins
-wds.linkis.engineconn.plugin.loader.store.path=/appcom/Install/LinkisInstall/lib/linkis-engineconn-plugins
-wds.linkis.public_module.path=/appcom/Install/LinkisInstall/lib/linkis-commons/public-module
-##engine Version
-#wds.linkis.spark.engine.version=
-#wds.linkis.hive.engine.version=
-#wds.linkis.python.engine.version=
\ No newline at end of file
diff --git a/db/linkis_ddl.sql b/db/linkis_ddl.sql
index cf3f3c5..5bfb25e 100644
--- a/db/linkis_ddl.sql
+++ b/db/linkis_ddl.sql
@@ -32,7 +32,7 @@
CREATE TABLE linkis_ps_configuration_config_value(
`id` bigint(20) NOT NULL AUTO_INCREMENT,
`config_key_id` bigint(20),
- `config_value` varchar(50),
+ `config_value` varchar(200),
`config_label_id`int(20),
`update_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
`create_time` datetime NOT NULL DEFAULT CURRENT_TIMESTAMP,
@@ -76,6 +76,7 @@
`instances` varchar(250) DEFAULT NULL COMMENT 'Entrance instances',
`metrics` text DEFAULT NULL COMMENT 'Job Metrics',
`engine_type` varchar(32) DEFAULT NULL COMMENT 'Engine type',
+ `execution_code` text DEFAULT NULL COMMENT 'Job origin code or code path',
PRIMARY KEY (`id`),
KEY `created_time` (`created_time`),
KEY `submit_user` (`submit_user`)
@@ -343,7 +344,7 @@
`context_scope` varchar(32) DEFAULT NULL,
`context_type` varchar(32) DEFAULT NULL,
`props` text,
- `value` text,
+ `value` mediumtext,
`context_id` int(11) DEFAULT NULL,
`keywords` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
@@ -392,7 +393,7 @@
`backup_instance` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `instance` (`instance`(128)),
- KEY `backup_instance` (`backup_instance`(128)),
+ KEY `backup_instance` (`backup_instance`(191)),
KEY `instance_2` (`instance`(128),`backup_instance`(128))
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
@@ -570,7 +571,7 @@
CREATE TABLE `linkis_ps_instance_label_relation` (
`id` int(20) NOT NULL AUTO_INCREMENT,
`label_id` int(20) DEFAULT NULL COMMENT 'id reference linkis_ps_instance_label -> id',
- `service_instance` varchar(128) NOT NULL COLLATE utf8_bin COMMENT 'structure like ${host|machine}:${port}',
+ `service_instance` varchar(64) NOT NULL COLLATE utf8_bin COMMENT 'structure like ${host|machine}:${port}',
`update_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'update unix timestamp',
`create_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'create unix timestamp',
PRIMARY KEY (`id`)
@@ -658,8 +659,8 @@
DROP TABLE IF EXISTS `linkis_cg_manager_engine_em`;
CREATE TABLE `linkis_cg_manager_engine_em` (
`id` int(20) NOT NULL AUTO_INCREMENT,
- `engine_instance` varchar(128) COLLATE utf8_bin DEFAULT NULL,
- `em_instance` varchar(128) COLLATE utf8_bin DEFAULT NULL,
+ `engine_instance` varchar(64) COLLATE utf8_bin DEFAULT NULL,
+ `em_instance` varchar(64) COLLATE utf8_bin DEFAULT NULL,
`update_time` datetime DEFAULT CURRENT_TIMESTAMP,
`create_time` datetime DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (`id`)
@@ -705,7 +706,7 @@
CREATE TABLE `linkis_cg_manager_label_service_instance` (
`id` int(20) NOT NULL AUTO_INCREMENT,
`label_id` int(20) DEFAULT NULL,
- `service_instance` varchar(128) COLLATE utf8_bin DEFAULT NULL,
+ `service_instance` varchar(64) COLLATE utf8_bin DEFAULT NULL,
`update_time` datetime DEFAULT CURRENT_TIMESTAMP,
`create_time` datetime DEFAULT CURRENT_TIMESTAMP,
PRIMARY KEY (`id`)
diff --git a/db/linkis_dml.sql b/db/linkis_dml.sql
index bc4746a..a73637f 100644
--- a/db/linkis_dml.sql
+++ b/db/linkis_dml.sql
@@ -2,7 +2,7 @@
-- 变量:
SET @SPARK_LABEL="spark-2.4.3";
-SET @HIVE_LABEL="hive-1.2.1";
+SET @HIVE_LABEL="hive-2.3.3";
SET @PYTHON_LABEL="python-python2";
SET @PIPELINE_LABEL="pipeline-*";
SET @JDBC_LABEL="jdbc-4";
@@ -29,54 +29,54 @@
-- Global Settings
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue', 'yarn队列名', 'yarn队列名', 'default', 'None', NULL, '0', '0', '1', '队列资源');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue', 'yarn队列名', 'yarn队列名', 'ide', 'None', NULL, '0', '0', '1', '队列资源');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.instance.max', '取值范围:1-128,单位:个', '队列实例最大个数', '30', 'Regex', '^(?:[1-9]\\d?|1[01]\\d|12[0-8])$', '0', '0', '1', '队列资源');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.cores.max', '取值范围:1-500,单位:个', '队列CPU使用上限', '150', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|500)$', '0', '0', '1', '队列资源');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.memory.max', '取值范围:1-1000,单位:G', '队列内存使用上限', '300G', 'Regex', '^([1-9]\\d{0,2}|1000)(G|g)$', '0', '0', '1', '队列资源');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '全局各个引擎内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '全局各个引擎核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|1[01]\\d|12[0-8])$', '0', '0', '1', '队列资源');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '全局各个引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源');
-- spark
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.yarnqueue.instance.max', '取值范围:1-128,单位:个', 'yarn队列实例最大个数', '30', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.yarnqueue.cores.max', '取值范围:1-500,单位:个', '队列CPU使用上限', '150', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|500)$', '0', '0', '1', '队列资源', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.yarnqueue.memory.max', '取值范围:1-1000,单位:G', '队列内存使用上限', '300G', 'Regex', '^([1-9]\\d{0,2}|1000)(G|g)$', '0', '0', '1', '队列资源', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '驱动器内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '驱动器核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.instances', '取值范围:1-40,单位:个', '执行器实例最大并发数', '2', 'NumInterval', '[1,40]', '0', '0', '2', 'spark资源设置', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.cores', '取值范围:1-8,单位:个', '执行器核心个数', '2', 'NumInterval', '[1,2]', '1', '0', '1','spark资源设置', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.memory', '取值范围:3-15,单位:G', '执行器内存大小', '3', 'NumInterval', '[3,15]', '0', '0', '3', 'spark资源设置', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.cores', '取值范围:只能取1,单位:个', '驱动器核心个数', '1', 'NumInterval', '[1,1]', '1', '1', '1', 'spark资源设置','spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.memory', '取值范围:1-15,单位:G', '驱动器内存大小','2', 'NumInterval', '[1,15]', '0', '0', '1', 'spark资源设置', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.pd.addresses', NULL, NULL, 'pd0:2379', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.addr', NULL, NULL, 'tidb', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.password', NULL, NULL, NULL, 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.port', NULL, NULL, '4000', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.user', NULL, NULL, 'root', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.python.version', '取值范围:python2,python3', 'python版本','python2', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'spark引擎设置', 'spark');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'spark引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'spark');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.instances', '取值范围:1-40,单位:个', 'spark执行器实例最大并发数', '2', 'NumInterval', '[1,40]', '0', '0', '2', 'spark资源设置', 'spark');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.cores', '取值范围:1-8,单位:个', 'spark执行器核心个数', '2', 'NumInterval', '[1,2]', '0', '0', '1','spark资源设置', 'spark');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.memory', '取值范围:3-15,单位:G', 'spark执行器内存大小', '3g', 'Regex', '^([3-9]|1[0-5])(G|g)$', '0', '0', '3', 'spark资源设置', 'spark');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.cores', '取值范围:只能取1,单位:个', 'spark驱动器核心个数', '1', 'NumInterval', '[1,1]', '0', '1', '1', 'spark资源设置','spark');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.memory', '取值范围:1-15,单位:G', 'spark驱动器内存大小','2g', 'Regex', '^([1-9]|1[0-5])(G|g)$', '0', '0', '1', 'spark资源设置', 'spark');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.pd.addresses', NULL, NULL, 'pd0:2379', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.addr', NULL, NULL, 'tidb', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.password', NULL, NULL, NULL, 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.port', NULL, NULL, '4000', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.user', NULL, NULL, 'root', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.python.version', '取值范围:python2,python3', 'python版本','python2', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'spark引擎设置', 'spark');
-- hive
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '驱动器内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源', 'hive');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '驱动器核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源', 'hive');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'hive');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('hive.client.memory', '取值范围:1-10,单位:G', 'hive引擎初始化内存大小','2', 'NumInterval', '[1,10]', '0', '0', '1', 'hive引擎设置', 'hive');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('hive.client.java.opts', 'hive客户端进程参数', 'hive引擎启动时jvm参数','', 'None', NULL, '1', '1', '1', 'hive引擎设置', 'hive');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('mapred.reduce.tasks', '范围:1-20,单位:个', 'reduce数', '10', 'NumInterval', '[1,20]', '0', '1', '1', 'hive资源设置', 'hive');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('dfs.block.size', '取值范围:2-10,单位:G', 'map数据块大小', '10', 'NumInterval', '[2,10]', '0', '1', '1', 'hive资源设置', 'hive');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('hive.exec.reduce.bytes.per.reducer', '取值范围:2-10,单位:G', 'reduce处理的数据量', '10', 'NumInterval', '[2,10]', '0', '1', '1', 'hive资源设置', 'hive');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'hive引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'hive');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.java.driver.memory', '取值范围:1-10,单位:G', 'hive引擎初始化内存大小','2g', 'Regex', '^([1-9]|10)(G|g)$', '0', '0', '1', 'hive引擎设置', 'hive');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('hive.client.java.opts', 'hive客户端进程参数', 'hive引擎启动时jvm参数','', 'None', NULL, '1', '1', '1', 'hive引擎设置', 'hive');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('mapred.reduce.tasks', '范围:1-20,单位:个', 'reduce数', '10', 'NumInterval', '[1,20]', '0', '1', '1', 'hive资源设置', 'hive');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('dfs.block.size', '取值范围:2-10,单位:G', 'map数据块大小', '10', 'NumInterval', '[2,10]', '0', '1', '1', 'hive资源设置', 'hive');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('hive.exec.reduce.bytes.per.reducer', '取值范围:2-10,单位:G', 'reduce处理的数据量', '10', 'NumInterval', '[2,10]', '0', '1', '1', 'hive资源设置', 'hive');
-- python
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '驱动器内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源', 'python');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '驱动器核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源', 'python');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'python');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('python.java.client.memory', '取值范围:1-2,单位:G', 'python引擎初始化内存大小', '1', 'NumInterval', '[1,2]', '0', '0', '1', 'python引擎设置', 'python');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('python.version', '取值范围:python2,python3', 'python版本','python2', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'python引擎设置', 'python');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', 'python驱动器内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源', 'python');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', 'python驱动器核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|1[01]\\d|12[0-8])$', '0', '0', '1', '队列资源', 'python');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'python引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'python');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.java.driver.memory', '取值范围:1-2,单位:G', 'python引擎初始化内存大小', '1g', 'Regex', '^([1-2])(G|g)$', '0', '0', '1', 'python引擎设置', 'python');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('python.version', '取值范围:python2,python3', 'python版本','python2', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'python引擎设置', 'python');
-- pipeline
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.mold', '取值范围:csv或excel', '结果集导出类型','csv', 'OFT', '[\"csv\",\"excel\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.field.split', '取值范围:,或\\t', 'csv分隔符',',', 'OFT', '[\",\",\"\\\\t\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.charset', '取值范围:utf-8或gbk', '结果集导出字符集','gbk', 'OFT', '[\"utf-8\",\"gbk\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.isoverwtite', '取值范围:true或false', '是否覆写','true', 'OFT', '[\"true\",\"false\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.instance', '范围:1-3,单位:个', 'pipeline引擎最大并发数','3', 'NumInterval', '[1,3]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.engine.memory', '取值范围:1-10,单位:G', 'pipeline引擎初始化内存大小','2', 'NumInterval', '[1,10]', '0', '0', '1', 'pipeline资源设置', 'pipeline');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.shuffle.null.type', '取值范围:NULL或者BLANK', '空值替换','NULL', 'OFT', '[\"NULL\",\"BLANK\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.mold', '取值范围:csv或excel', '结果集导出类型','csv', 'OFT', '[\"csv\",\"excel\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.field.split', '取值范围:,或\\t', 'csv分隔符',',', 'OFT', '[\",\",\"\\\\t\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.charset', '取值范围:utf-8或gbk', '结果集导出字符集','gbk', 'OFT', '[\"utf-8\",\"gbk\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.isoverwtite', '取值范围:true或false', '是否覆写','true', 'OFT', '[\"true\",\"false\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-3,单位:个', 'pipeline引擎最大并发数','3', 'NumInterval', '[1,3]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.java.driver.memory', '取值范围:1-10,单位:G', 'pipeline引擎初始化内存大小','2g', 'Regex', '^([1-9]|10)(G|g)$', '0', '0', '1', 'pipeline资源设置', 'pipeline');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.shuffle.null.type', '取值范围:NULL或者BLANK', '空值替换','NULL', 'OFT', '[\"NULL\",\"BLANK\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
-- jdbc
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.connect.url', '例如:jdbc:hive2://127.0.0.1:10000', 'jdbc连接地址', 'jdbc:hive2://127.0.0.1:10000', 'Regex', '^\s*jdbc:\w+://([^:]+)(:\d+)(/[^\?]+)?(\?\S*)?$', '0', '0', '1', '数据源配置', 'jdbc');
+insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.connect.url', '例如:jdbc:hive2://127.0.0.1:10000', 'jdbc连接地址', 'jdbc:hive2://127.0.0.1:10000', 'Regex', '^\\s*jdbc:\\w+://([^:]+)(:\\d+)(/[^\\?]+)?(\\?\\S*)?$', '0', '0', '1', '数据源配置', 'jdbc');
insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.version', '取值范围:jdbc3,jdbc4', 'jdbc版本','jdbc4', 'OFT', '[\"jdbc3\",\"jdbc4\"]', '0', '0', '1', '数据源配置', 'jdbc');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.username', 'username', '数据库连接用户名', '', '', '', '0', '0', '1', '用户配置', 'jdbc');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.password', 'password', '数据库连接密码', '', '', '', '0', '0', '1', '用户配置', 'jdbc');
+insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.username', 'username', '数据库连接用户名', '', 'None', '', '0', '0', '1', '用户配置', 'jdbc');
+insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.password', 'password', '数据库连接密码', '', 'None', '', '0', '0', '1', '用户配置', 'jdbc');
insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.connect.max', '范围:1-20,单位:个', 'jdbc引擎最大连接数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '数据源配置', 'jdbc');
+
-- Configuration first level directory
insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-全局设置,*-*', 'OPTIONAL', 2, now(), now());
insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-IDE,*-*', 'OPTIONAL', 2, now(), now());
@@ -219,84 +219,85 @@
(1,'Yarn','sit',NULL,'{\r\n\"rmWebAddress\": \"@YARN_RESTFUL_URL\",\r\n\"hadoopVersion\": \"2.7.2\",\r\n\"authorEnable\":false,\r\n\"user\":\"hadoop\",\r\n\"pwd\":\"123456\"\r\n}');
-- errorcode
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (1,'10001','会话创建失败,%s队列不存在,请检查队列设置是否正确','queue (\\S+) is not exists in YARN',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (2,'10001','会话创建失败,用户%s不能提交应用到队列:%s,请检查队列设置是否正确','User (\\S+) cannot submit applications to queue (\\S+)',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (3,'20001','Session创建失败,当前申请资源%s,队列可用资源%s,请检查资源配置是否合理','您本次向任务队列([a-zA-Z_0-9\\.]+)请求资源((.+)),任务队列最大可用资源(.+),任务队列剩余可用资源((.+))您已占用任务队列资源(.+)',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (4,'20002','Session创建失败,服务器资源不足,请稍后再试','远程服务器没有足够资源实例化[a-zA-Z]+ Session,通常是由于您设置【驱动内存】或【客户端内存】过高导致的,建议kill脚本,调低参数后重新提交!等待下次调度...',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (5,'20003','Session创建失败,队列资源不足,请稍后再试','request resources from ResourceManager has reached 560++ tries, give up and mark it as FAILED.',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (6,'20003','文件%s不存在','Caused by:\\s*java.io.FileNotFoundException',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (7,'20083','Java进程内存溢出','OutOfMemoryError',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (8,'30001','%s无权限访问,请申请开通数据表权限','Permission denied:\\s*user=[a-zA-Z0-9_]+,\\s*access=[A-Z]+\\s*,\\s*inode="([a-zA-Z0-9/_\\.]+)"',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (9,'40001','数据库%s不存在,请检查引用的数据库是否有误','Database ''([a-zA-Z_0-9]+)'' not found',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (10,'40001','数据库%s不存在,请检查引用的数据库是否有误','Database does not exist: ([a-zA-Z_0-9]+)',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (11,'40002','表%s不存在,请检查引用的表是否有误','Table or view not found: ([`\\.a-zA-Z_0-9]+)',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (12,'40002','表%s不存在,请检查引用的表是否有误','Table not found ''([a-zA-Z_0-9]+)''',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (13,'40003','字段%s不存在,请检查引用的字段是否有误','cannot resolve ''`(.+)`'' given input columns',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (14,'40003','字段%s不存在,请检查引用的字段是否有误',' Invalid table alias or column reference ''(.+)'':',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (15,'40004','分区字段%s不存在,请检查引用的表%s是否为分区表或分区字段有误','([a-zA-Z_0-9]+) is not a valid partition column in table ([`\\.a-zA-Z_0-9]+)',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (16,'40004','分区字段%s不存在,请检查引用的表是否为分区表或分区字段有误','Partition spec \\{(\\S+)\\} contains non-partition columns',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (17,'40004','分区字段%s不存在,请检查引用的表是否为分区表或分区字段有误','table is not partitioned but partition spec exists:\\{(.+)\\}',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (18,'50001','括号不匹配,请检查代码中括号是否前后匹配','extraneous input ''\\)''',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (20,'50002','非聚合函数%s必须写在group by中,请检查代码的group by语法','expression ''(\\S+)'' is neither present in the group by',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (21,'50002','非聚合函数%s必须写在group by中,请检查代码的group by语法','grouping expressions sequence is empty,\\s?and ''(\\S+)'' is not an aggregate function',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (22,'50002','非聚合函数%s必须写在group by中,请检查代码的group by语法','Expression not in GROUP BY key ''(\\S+)''',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (23,'50003','未知函数%s,请检查代码中引用的函数是否有误','Undefined function: ''(\\S+)''',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (24,'50003','未知函数%s,请检查代码中引用的函数是否有误','Invalid function ''(\\S+)''',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (25,'50004','字段%s存在名字冲突,请检查子查询内是否有同名字段','Reference ''(\\S+)'' is ambiguous',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (26,'50004','字段%s存在名字冲突,请检查子查询内是否有同名字段','Ambiguous column Reference ''(\\S+)'' in subquery',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (27,'50005','字段%s必须指定表或者子查询别名,请检查该字段来源','Column ''(\\S+)'' Found in more than One Tables/Subqueries',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (28,'50006','表%s在数据库%s中已经存在,请删除相应表后重试','Table or view ''(\\S+)'' already exists in database ''(\\S+)''',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (29,'50006','表%s在数据库中已经存在,请删除相应表后重试','Table (\\S+) already exists',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (30,'50006','表%s在数据库中已经存在,请删除相应表后重试','Table already exists',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (31,'50006','表%s在数据库中已经存在,请删除相应表后重试','AnalysisException: (S+) already exists',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (32,'64001','找不到导入文件地址:%s','java.io.FileNotFoundException: (\\S+) \\(No such file or directory\\)',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (33,'64002','导出为excel时临时文件目录权限异常','java.io.IOException: Permission denied(.+)at org.apache.poi.xssf.streaming.SXSSFWorkbook.createAndRegisterSXSSFSheet',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (34,'64003','导出文件时无法创建目录:%s','java.io.IOException: Mkdirs failed to create (\\S+) (.+)',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (35,'50008','导入模块错误,系统没有%s模块,请联系运维人员安装','ImportError: No module named (S+)',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (36,'50009','插入目标表字段数量不匹配,请检查代码!','requires that the data to be inserted have the same number of columns as the target table',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (38,'50011','数据类型不匹配,请检查代码!','due to data type mismatch: differing types in',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (39,'50012','字段%s引用有误,请检查字段是否存在!','Invalid column reference (S+)',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (40,'50013','字段%s提取数据失败','Can''t extract value from (S+): need struct type but got string',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (41,'50014','括号或者关键字不匹配,请检查代码!','mismatched input ''(\\S+)'' expecting',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (42,'50015','group by 位置2不在select列表中,请检查代码!','GROUP BY position (S+) is not in select list',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (43,'50016','代码中存在NoneType空类型变量,请检查代码','''NoneType'' object',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (44,'50017','数组越界','IndexError:List index out of range',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (45,'50013','字段提取数据失败请检查字段类型','Can''t extract value from (S+): need struct type but got string',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (46,'50018','插入数据未指定目标表字段%s,请检查代码!','Cannot insert into target table because column number/types are different ''(S+)''',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (47,'50019','表别名%s错误,请检查代码!','Invalid table alias ''(\\S+)''',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (48,'50020','UDF函数未指定参数,请检查代码!','UDFArgumentException Argument expected',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (49,'50021','聚合函数%s不能写在group by 中,请检查代码!','aggregate functions are not allowed in GROUP BY',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (50,'50007','您的代码有语法错误,请您修改代码之后执行','SyntaxError',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (51,'40002','表不存在,请检查引用的表是否有误','Table not found',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (52,'40003','函数使用错误,请检查您使用的函数方式','No matching method',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (53,'50032','用户主动kill任务','is killed by user',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (54,'60001','python代码变量%s未定义','name ''(S+)'' is not defined',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (55,'60002','python udf %s 未定义','Undefined function:s+''(S+)''',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (56,'50007','您的sql代码可能有语法错误,请检查sql代码','FAILED: ParseException',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (57,'50007','您的sql代码可能有语法错误,请检查sql代码','org.apache.spark.sql.catalyst.parser.ParseException',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (58,'60003','脚本语法有误','ParseException:',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (59,'60010','您可能没有相关权限','Permission denied',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (60,'61027','python执行不能将%s和%s两种类型进行连接','cannot concatenate ''(S+)'' and ''(S+)''',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (61,'60020','pyspark执行失败,可能是语法错误或stage失败','Py4JJavaError: An error occurred',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (62,'61028','python代码缩进对齐有误','unexpected indent',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (63,'60078','个人库超过限制','is exceeded',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (64,'69582','python代码缩进有误','unexpected indent',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (65,'69583','python代码反斜杠后面必须换行','unexpected character after line',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (66,'60091','导出Excel表超过最大限制1048575','Invalid row number',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (67,'60092','python save as table未指定格式,默认用parquet保存,hive查询报错','parquet.io.ParquetDecodingException',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (68,'11011','远程服务器内存资源不足','errCode: 11011',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (69,'11012','远程服务器CPU资源不足','errCode: 11012',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (70,'11013','远程服务器实例资源不足','errCode: 11013',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (71,'11014','队列CPU资源不足','errCode: 11014',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (72,'11015','队列内存资源不足','errCode: 11015',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (73,'11016','队列实例数超过限制','errCode: 11016',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (74,'11017','超出全局计算引擎实例限制','errCode: 11017',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (75,'60035','资源不足,启动引擎失败','资源不足',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (76,'60075','获取Yarn队列信息异常,可能是您设置的yarn队列不存在','获取Yarn队列信息异常',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (78,'95001','找不到变量值,请确认您是否设置相关变量','not find variable substitution for',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (79,'95002','不存在的代理用户,请检查你是否申请过平台层(bdp或者bdap)用户','failed to change current working directory ownership',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (80,'95003','请检查提交用户在WTSS内是否有该代理用户的权限,代理用户中是否存在特殊字符,是否用错了代理用户,OS层面是否有该用户,系统设置里面是否设置了该用户为代理用户','没有权限执行当前任务',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (81,'95004','平台层不存在您的执行用户,请在ITSM申请平台层(bdp或者bdap)用户','使用chown命令修改',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (82,'95005','未配置代理用户,请在ITSM走WTSS用户变更单,为你的用户授权改代理用户','请联系系统管理员为您的用户添加该代理用户',0);
-insert into linkis_ps_error_code (id,error_code,error_desc,error_regex,error_type) values (83,'95006','Could not open input file for reading%does not exist','JobServer中不存在您的脚本文件,请将你的脚本文件放入对应的JobServer路径中',0);
-insert into linkis_ps_error_code (error_code,error_desc,error_regex,error_type) values ('60079','所查库表无权限','Authorization failed:No privilege',0);
\ No newline at end of file
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('10001','会话创建失败,%s队列不存在,请检查队列设置是否正确','queue (\\S+) is not exists in YARN',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('10001','会话创建失败,用户%s不能提交应用到队列:%s,请联系提供队列给您的人员','User (\\S+) cannot submit applications to queue (\\S+)',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('20001','Session创建失败,当前申请资源%s,队列可用资源%s,请检查资源配置是否合理','您本次向任务队列([a-zA-Z_0-9\\.]+)请求资源((.+)),任务队列最大可用资源(.+),任务队列剩余可用资源((.+))您已占用任务队列资源(.+)',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('20002','Session创建失败,服务器资源不足,请稍后再试','远程服务器没有足够资源实例化[a-zA-Z]+ Session,通常是由于您设置【驱动内存】或【客户端内存】过高导致的,建议kill脚本,调低参数后重新提交!等待下次调度...',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('20003','Session创建失败,队列资源不足,请稍后再试','request resources from ResourceManager has reached 560++ tries, give up and mark it as FAILED.',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('20003','文件%s不存在','Caused by:\\s*java.io.FileNotFoundException',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('20083','Java进程内存溢出,建议优化脚本内容','OutOfMemoryError',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('30001','%s无权限访问,请申请开通数据表权限,请联系您的数据管理人员','Permission denied:\\s*user=[a-zA-Z0-9_]+,\\s*access=[A-Z]+\\s*,\\s*inode="([a-zA-Z0-9/_\\.]+)"',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('40001','数据库%s不存在,请检查引用的数据库是否有误','Database ''([a-zA-Z_0-9]+)'' not found',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('40001','数据库%s不存在,请检查引用的数据库是否有误','Database does not exist: ([a-zA-Z_0-9]+)',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('40002','表%s不存在,请检查引用的表是否有误','Table or view not found: ([`\\.a-zA-Z_0-9]+)',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('40002','表%s不存在,请检查引用的表是否有误','Table not found ''([a-zA-Z_0-9]+)''',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('40003','字段%s不存在,请检查引用的字段是否有误','cannot resolve ''`(.+)`'' given input columns',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('40003','字段%s不存在,请检查引用的字段是否有误',' Invalid table alias or column reference ''(.+)'':',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('40004','分区字段%s不存在,请检查引用的表%s是否为分区表或分区字段有误','([a-zA-Z_0-9]+) is not a valid partition column in table ([`\\.a-zA-Z_0-9]+)',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('40004','分区字段%s不存在,请检查引用的表是否为分区表或分区字段有误','Partition spec \\{(\\S+)\\} contains non-partition columns',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('40004','分区字段%s不存在,请检查引用的表是否为分区表或分区字段有误','table is not partitioned but partition spec exists:\\{(.+)\\}',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('40004','表对应的路径不存在,请联系您的数据管理人员','Path does not exist: viewfs',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50001','括号不匹配,请检查代码中括号是否前后匹配','extraneous input ''\\)''',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50002','非聚合函数%s必须写在group by中,请检查代码的group by语法','expression ''(\\S+)'' is neither present in the group by',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50002','非聚合函数%s必须写在group by中,请检查代码的group by语法','grouping expressions sequence is empty,\\s?and ''(\\S+)'' is not an aggregate function',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50002','非聚合函数%s必须写在group by中,请检查代码的group by语法','Expression not in GROUP BY key ''(\\S+)''',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50003','未知函数%s,请检查代码中引用的函数是否有误','Undefined function: ''(\\S+)''',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50003','未知函数%s,请检查代码中引用的函数是否有误','Invalid function ''(\\S+)''',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50004','字段%s存在名字冲突,请检查子查询内是否有同名字段','Reference ''(\\S+)'' is ambiguous',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50004','字段%s存在名字冲突,请检查子查询内是否有同名字段','Ambiguous column Reference ''(\\S+)'' in subquery',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50005','字段%s必须指定表或者子查询别名,请检查该字段来源','Column ''(\\S+)'' Found in more than One Tables/Subqueries',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50006','表%s在数据库%s中已经存在,请删除相应表后重试','Table or view ''(\\S+)'' already exists in database ''(\\S+)''',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50006','表%s在数据库中已经存在,请删除相应表后重试','Table (\\S+) already exists',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50006','表%s在数据库中已经存在,请删除相应表后重试','Table already exists',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50006','表%s在数据库中已经存在,请删除相应表后重试','AnalysisException: (\\S+) already exists',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('64001','找不到导入文件地址:%s','java.io.FileNotFoundException: (\\S+) \\(No such file or directory\\)',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('64002','导出为excel时临时文件目录权限异常','java.io.IOException: Permission denied(.+)at org.apache.poi.xssf.streaming.SXSSFWorkbook.createAndRegisterSXSSFSheet',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('64003','导出文件时无法创建目录:%s','java.io.IOException: Mkdirs failed to create (\\S+) (.+)',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50008','导入模块错误,系统没有%s模块,请联系运维人员安装','ImportError: No module named (\\S+)',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50009','插入目标表字段数量不匹配,请检查代码!','requires that the data to be inserted have the same number of columns as the target table',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50011','数据类型不匹配,请检查代码!','due to data type mismatch: differing types in',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50012','字段%s引用有误,请检查字段是否存在!','Invalid column reference (\\S+)',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50013','字段%s提取数据失败','Can''t extract value from (\\S+): need struct type but got string',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50014','括号或者关键字不匹配,请检查代码!','mismatched input ''(\\S+)'' expecting',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50015','group by 位置2不在select列表中,请检查代码!','GROUP BY position (\\S+) is not in select list',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50016','代码中存在NoneType空类型变量,请检查代码','''NoneType'' object',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50017','数组越界','IndexError:List index out of range',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50013','字段提取数据失败请检查字段类型','Can''t extract value from (\\S+): need struct type but got string',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50018','插入数据未指定目标表字段%s,请检查代码!','Cannot insert into target table because column number/types are different ''(\\S+)''',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50019','表别名%s错误,请检查代码!','Invalid table alias ''(\\S+)''',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50020','UDF函数未指定参数,请检查代码!','UDFArgumentException Argument expected',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50021','聚合函数%s不能写在group by 中,请检查代码!','aggregate functions are not allowed in GROUP BY',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50007','您的代码有语法错误,请您修改代码之后执行','SyntaxError',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('40002','表不存在,请检查引用的表是否有误','Table not found',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('40003','函数使用错误,请检查您使用的函数方式','No matching method',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50032','用户主动kill任务','is killed by user',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('60001','python代码变量%s未定义','name ''(\\S+)'' is not defined',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('60002','python udf %s 未定义','Undefined function:\\s+''(\\S+)''',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50007','您的sql代码可能有语法错误,请检查sql代码','FAILED: ParseException',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('50007','您的sql代码可能有语法错误,请检查sql代码','org.apache.spark.sql.catalyst.parser.ParseException',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('60003','脚本语法有误','ParseException:',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('60010','您可能没有相关权限','Permission denied',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('61027','python执行不能将%s和%s两种类型进行连接','cannot concatenate ''(\\S+)'' and ''(\\S+)''',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('60020','pyspark执行失败,可能是语法错误或stage失败','Py4JJavaError: An error occurred',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('61028','python代码缩进对齐有误','unexpected indent',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('60078','个人库超过限制','is exceeded',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('69582','python代码缩进有误','unexpected indent',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('69583','python代码反斜杠后面必须换行','unexpected character after line',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('60091','导出Excel表超过最大限制1048575','Invalid row number',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('60092','python save as table未指定格式,默认用parquet保存,hive查询报错','parquet.io.ParquetDecodingException',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('11011','远程服务器内存资源不足','errCode: 11011',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('11012','远程服务器CPU资源不足','errCode: 11012',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('11013','远程服务器实例资源不足','errCode: 11013',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('11014','队列CPU资源不足','errCode: 11014',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('11015','队列内存资源不足','errCode: 11015',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('11016','队列实例数超过限制','errCode: 11016',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('11017','超出全局计算引擎实例限制','errCode: 11017',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('60035','资源不足,启动引擎失败','资源不足',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('60075','获取Yarn队列信息异常,可能是您设置的yarn队列不存在','获取Yarn队列信息异常',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('95001','找不到变量值,请确认您是否设置相关变量','not find variable substitution for',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('95002','不存在的代理用户,请检查你是否申请过平台层(bdp或者bdap)用户','failed to change current working directory ownership',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('95003','请检查提交用户在WTSS内是否有该代理用户的权限,代理用户中是否存在特殊字符,是否用错了代理用户,OS层面是否有该用户,系统设置里面是否设置了该用户为代理用户','没有权限执行当前任务',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('95004','平台层不存在您的执行用户,请在ITSM申请平台层(bdp或者bdap)用户','使用chown命令修改',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('95005','未配置代理用户,请在ITSM走WTSS用户变更单,为你的用户授权改代理用户','请联系系统管理员为您的用户添加该代理用户',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('60079','所查库表无权限','Authorization failed:No privilege',0);
+INSERT INTO linkis_ps_error_code (error_code,error_desc,error_regex,error_type) VALUES ('30002','启动引擎超时,您可以进行任务重试','wait for DefaultEngineConn',0);
\ No newline at end of file
diff --git a/db/module/linkis-cs.sql b/db/module/linkis-cs.sql
index 9d37c04..1399008 100644
--- a/db/module/linkis-cs.sql
+++ b/db/module/linkis-cs.sql
@@ -10,7 +10,7 @@
`context_scope` varchar(32) DEFAULT NULL,
`context_type` varchar(32) DEFAULT NULL,
`props` text,
- `value` text,
+ `value` mediumtext,
`context_id` int(11) DEFAULT NULL,
`keywords` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`),
diff --git a/db/module/linkis_configuration_dml.sql b/db/module/linkis_configuration_dml.sql
index 13d57cc..0527324 100644
--- a/db/module/linkis_configuration_dml.sql
+++ b/db/module/linkis_configuration_dml.sql
@@ -29,54 +29,54 @@
- Default Key of Configuration
-- Global Settings
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue', 'yarn队列名', 'yarn队列名', 'default', 'None', NULL, '0', '0', '1', '队列资源');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue', 'yarn队列名', 'yarn队列名', 'default', 'None', NULL, '0', '0', '1', '队列资源');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.instance.max', '取值范围:1-128,单位:个', 'yarn队列实例最大个数', '30', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.cores.max', '取值范围:1-500,单位:个', '队列CPU使用上限', '150', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|500)$', '0', '0', '1', '队列资源');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.yarnqueue.memory.max', '取值范围:1-1000,单位:G', '队列内存使用上限', '300G', 'Regex', '^([1-9]\\d{0,2}|1000)(G|g)$', '0', '0', '1', '队列资源');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '全局驱动器内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '全局驱动器核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1][0-2][0-8])$', '0', '0', '1', '队列资源');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '全局引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源');
-- spark
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.yarnqueue.instance.max', '取值范围:1-128,单位:个', 'yarn队列实例最大个数', '30', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.yarnqueue.cores.max', '取值范围:1-500,单位:个', '队列CPU使用上限', '150', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|500)$', '0', '0', '1', '队列资源', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.yarnqueue.memory.max', '取值范围:1-1000,单位:G', '队列内存使用上限', '300G', 'Regex', '^([1-9]\\d{0,2}|1000)(G|g)$', '0', '0', '1', '队列资源', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '驱动器内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '驱动器核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.instances', '取值范围:1-40,单位:个', '执行器实例最大并发数', '2', 'NumInterval', '[1,40]', '0', '0', '2', 'spark资源设置', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.cores', '取值范围:1-8,单位:个', '执行器核心个数', '2', 'NumInterval', '[1,2]', '1', '0', '1','spark资源设置', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.memory', '取值范围:3-15,单位:G', '执行器内存大小', '3', 'NumInterval', '[3,15]', '0', '0', '3', 'spark资源设置', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.cores', '取值范围:只能取1,单位:个', '驱动器核心个数', '1', 'NumInterval', '[1,1]', '1', '1', '1', 'spark资源设置','spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.memory', '取值范围:1-15,单位:G', '驱动器内存大小','2', 'NumInterval', '[1,15]', '0', '0', '1', 'spark资源设置', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.pd.addresses', NULL, NULL, 'pd0:2379', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.addr', NULL, NULL, 'tidb', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.password', NULL, NULL, NULL, 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.port', NULL, NULL, '4000', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.user', NULL, NULL, 'root', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.python.version', '取值范围:python2,python3', 'python版本','python2', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'spark引擎设置', 'spark');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'spark引擎最大并发数', '3', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'spark');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.instances', '取值范围:1-40,单位:个', 'spark执行器实例最大并发数', '2', 'NumInterval', '[1,40]', '0', '0', '2', 'spark资源设置', 'spark');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.cores', '取值范围:1-8,单位:个', 'spark执行器核心个数', '2', 'NumInterval', '[1,2]', '1', '0', '1','spark资源设置', 'spark');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.executor.memory', '取值范围:3-15,单位:G', 'spark执行器内存大小', '3g', 'Regex', '^([3-9]|1[0-5])(G|g)$', '0', '0', '3', 'spark资源设置', 'spark');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.cores', '取值范围:只能取1,单位:个', 'spark驱动器核心个数', '1', 'NumInterval', '[1,1]', '1', '1', '1', 'spark资源设置','spark');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.driver.memory', '取值范围:1-15,单位:G', 'spark驱动器内存大小','2g', 'Regex', '^([3-9]|1[0-5])(G|g)$', '0', '0', '1', 'spark资源设置', 'spark');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.pd.addresses', NULL, NULL, 'pd0:2379', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.addr', NULL, NULL, 'tidb', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.password', NULL, NULL, NULL, 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.port', NULL, NULL, '4000', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.tispark.tidb.user', NULL, NULL, 'root', 'None', NULL, '0', '0', '1', 'tidb设置', 'spark');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('spark.python.version', '取值范围:python2,python3', 'python版本','python2', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'spark引擎设置', 'spark');
-- hive
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '驱动器内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源', 'hive');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '驱动器核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源', 'hive');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'hive');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('hive.client.memory', '取值范围:1-10,单位:G', 'hive引擎初始化内存大小','2', 'NumInterval', '[1,10]', '0', '0', '1', 'hive引擎设置', 'hive');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('hive.client.java.opts', 'hive客户端进程参数', 'hive引擎启动时jvm参数','', 'None', NULL, '1', '1', '1', 'hive引擎设置', 'hive');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('mapred.reduce.tasks', '范围:1-20,单位:个', 'reduce数', '10', 'NumInterval', '[1,20]', '0', '1', '1', 'hive资源设置', 'hive');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('dfs.block.size', '取值范围:2-10,单位:G', 'map数据块大小', '10', 'NumInterval', '[2,10]', '0', '1', '1', 'hive资源设置', 'hive');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('hive.exec.reduce.bytes.per.reducer', '取值范围:2-10,单位:G', 'reduce处理的数据量', '10', 'NumInterval', '[2,10]', '0', '1', '1', 'hive资源设置', 'hive');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'hive引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'hive');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.java.driver.memory', '取值范围:1-10,单位:G', 'hive引擎初始化内存大小','2g', 'Regex', '^([1-9]|10)(G|g)$', '0', '0', '1', 'hive引擎设置', 'hive');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('hive.client.java.opts', 'hive客户端进程参数', 'hive引擎启动时jvm参数','', 'None', NULL, '1', '1', '1', 'hive引擎设置', 'hive');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('mapred.reduce.tasks', '范围:1-20,单位:个', 'reduce数', '10', 'NumInterval', '[1,20]', '0', '1', '1', 'hive资源设置', 'hive');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('dfs.block.size', '取值范围:2-10,单位:G', 'map数据块大小', '10', 'NumInterval', '[2,10]', '0', '1', '1', 'hive资源设置', 'hive');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('hive.exec.reduce.bytes.per.reducer', '取值范围:2-10,单位:G', 'reduce处理的数据量', '10', 'NumInterval', '[2,10]', '0', '1', '1', 'hive资源设置', 'hive');
-- python
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', '驱动器内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源', 'python');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', '驱动器核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源', 'python');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', '引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'python');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('python.java.client.memory', '取值范围:1-2,单位:G', 'python引擎初始化内存大小', '1', 'NumInterval', '[1,2]', '0', '0', '1', 'python引擎设置', 'python');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('python.version', '取值范围:python2,python3', 'python版本','python2', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'python引擎设置', 'python');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.memory.max', '取值范围:1-100,单位:G', 'python驱动器内存使用上限', '20G', 'Regex', '^([1-9]\\d{0,1}|100)(G|g)$', '0', '0', '1', '队列资源', 'python');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.client.core.max', '取值范围:1-128,单位:个', 'python驱动器核心个数上限', '10', 'Regex', '^(?:[1-9]\\d?|[1234]\\d{2}|128)$', '0', '0', '1', '队列资源', 'python');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-20,单位:个', 'python引擎最大并发数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '队列资源', 'python');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.java.driver.memory', '取值范围:1-2,单位:G', 'python引擎初始化内存大小', '1g', 'Regex', '^([1-2])(G|g)$', '0', '0', '1', 'python引擎设置', 'python');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('python.version', '取值范围:python2,python3', 'python版本','python2', 'OFT', '[\"python3\",\"python2\"]', '0', '0', '1', 'python引擎设置', 'python');
-- pipeline
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.mold', '取值范围:csv或excel', '结果集导出类型','csv', 'OFT', '[\"csv\",\"excel\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.field.split', '取值范围:,或\\t', 'csv分隔符',',', 'OFT', '[\",\",\"\\\\t\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.charset', '取值范围:utf-8或gbk', '结果集导出字符集','gbk', 'OFT', '[\"utf-8\",\"gbk\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.isoverwtite', '取值范围:true或false', '是否覆写','true', 'OFT', '[\"true\",\"false\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.instance', '范围:1-3,单位:个', 'pipeline引擎最大并发数','3', 'NumInterval', '[1,3]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.engine.memory', '取值范围:1-10,单位:G', 'pipeline引擎初始化内存大小','2', 'NumInterval', '[1,10]', '0', '0', '1', 'pipeline资源设置', 'pipeline');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.shuffle.null.type', '取值范围:NULL或者BLANK', '空值替换','NULL', 'OFT', '[\"NULL\",\"BLANK\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.mold', '取值范围:csv或excel', '结果集导出类型','csv', 'OFT', '[\"csv\",\"excel\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.field.split', '取值范围:,或\\t', 'csv分隔符',',', 'OFT', '[\",\",\"\\\\t\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.charset', '取值范围:utf-8或gbk', '结果集导出字符集','gbk', 'OFT', '[\"utf-8\",\"gbk\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.isoverwtite', '取值范围:true或false', '是否覆写','true', 'OFT', '[\"true\",\"false\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.rm.instance', '范围:1-3,单位:个', 'pipeline引擎最大并发数','3', 'NumInterval', '[1,3]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.engineconn.java.driver.memory', '取值范围:1-10,单位:G', 'pipeline引擎初始化内存大小','2g', 'Regex', '^([1-9]|10)(G|g)$', '0', '0', '1', 'pipeline资源设置', 'pipeline');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('pipeline.output.shuffle.null.type', '取值范围:NULL或者BLANK', '空值替换','NULL', 'OFT', '[\"NULL\",\"BLANK\"]', '0', '0', '1', 'pipeline引擎设置', 'pipeline');
-- jdbc
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.connect.url', '例如:jdbc:hive2://127.0.0.1:10000', 'jdbc连接地址', 'jdbc:hive2://127.0.0.1:10000', 'Regex', '^\s*jdbc:\w+://([^:]+)(:\d+)(/[^\?]+)?(\?\S*)?$', '0', '0', '1', '数据源配置', 'jdbc');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.version', '取值范围:jdbc3,jdbc4', 'jdbc版本','jdbc4', 'OFT', '[\"jdbc3\",\"jdbc4\"]', '0', '0', '1', '数据源配置', 'jdbc');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.username', 'username', '数据库连接用户名', '', '', '', '0', '0', '1', '用户配置', 'jdbc');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.password', 'password', '数据库连接密码', '', '', '', '0', '0', '1', '用户配置', 'jdbc');
-insert into `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.connect.max', '范围:1-20,单位:个', 'jdbc引擎最大连接数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '数据源配置', 'jdbc');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.connect.url', '例如:jdbc:hive2://127.0.0.1:10000', 'jdbc连接地址', 'jdbc:hive2://127.0.0.1:10000', 'Regex', '^\s*jdbc:\w+://([^:]+)(:\d+)(/[^\?]+)?(\?\S*)?$', '0', '0', '1', '数据源配置', 'jdbc');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.version', '取值范围:jdbc3,jdbc4', 'jdbc版本','jdbc4', 'OFT', '[\"jdbc3\",\"jdbc4\"]', '0', '0', '1', '数据源配置', 'jdbc');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.username', 'username', '数据库连接用户名', '', '', '', '0', '0', '1', '用户配置', 'jdbc');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.password', 'password', '数据库连接密码', '', '', '', '0', '0', '1', '用户配置', 'jdbc');
+INSERT INTO `linkis_ps_configuration_config_key` (`key`, `description`, `name`, `default_value`, `validate_type`, `validate_range`, `is_hidden`, `is_advanced`, `level`, `treeName`, `engine_conn_type`) VALUES ('wds.linkis.jdbc.connect.max', '范围:1-20,单位:个', 'jdbc引擎最大连接数', '10', 'NumInterval', '[1,20]', '0', '0', '1', '数据源配置', 'jdbc');
+
-- Configuration first level directory
insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-全局设置,*-*', 'OPTIONAL', 2, now(), now());
insert into `linkis_cg_manager_label` (`label_key`, `label_value`, `label_feature`, `label_value_size`, `update_time`, `create_time`) VALUES ('combined_userCreator_engineType','*-IDE,*-*', 'OPTIONAL', 2, now(), now());
@@ -209,7 +209,8 @@
(select `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation
INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = @PIPELINE_ALL);
--- jdbc default configuration
-insert into `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`)
-(select `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation
+-- jdbc默认配置
+INSERT INTO `linkis_ps_configuration_config_value` (`config_key_id`, `config_value`, `config_label_id`)
+(SELECT `relation`.`config_key_id` AS `config_key_id`, '' AS `config_value`, `relation`.`engine_type_label_id` AS `config_label_id` FROM linkis_ps_configuration_key_engine_relation relation
INNER JOIN linkis_cg_manager_label label ON relation.engine_type_label_id = label.id AND label.label_value = @JDBC_ALL);
+
diff --git a/db/module/linkis_query.sql b/db/module/linkis_query.sql
index 8a201a2..6f75c7f 100644
--- a/db/module/linkis_query.sql
+++ b/db/module/linkis_query.sql
@@ -20,6 +20,7 @@
`updated_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT 'Update time',
`instances` varchar(250) DEFAULT NULL COMMENT 'Entrance instances',
`engine_type` varchar(32) DEFAULT NULL COMMENT 'Engine type',
+ `execution_code` text DEFAULT NULL COMMENT 'Job origin code or code path',
PRIMARY KEY (`id`),
KEY `created_time` (`created_time`),
KEY `submit_user` (`submit_user`)
diff --git a/linkis-commons/linkis-common/pom.xml b/linkis-commons/linkis-common/pom.xml
index 8cb56cc..9f723df 100644
--- a/linkis-commons/linkis-common/pom.xml
+++ b/linkis-commons/linkis-common/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/Logging.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/Logging.scala
index cfcf4e3..c6e9a79 100644
--- a/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/Logging.scala
+++ b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/Logging.scala
@@ -1,63 +1,63 @@
-/*
- * Copyright 2019 WeBank
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.webank.wedatasphere.linkis.common.utils
-
-import org.slf4j.LoggerFactory
-
-
-trait Logging {
-
- protected lazy implicit val logger = LoggerFactory.getLogger(getClass)
-
- def trace(message: => String) = {
- if (logger.isTraceEnabled) {
- logger.trace(message.toString)
- }
- }
-
- def debug(message: => String): Unit = {
- if (logger.isDebugEnabled) {
- logger.debug(message.toString)
- }
- }
-
- def info(message: => String): Unit = {
- if (logger.isInfoEnabled) {
- logger.info(message.toString)
- }
- }
-
- def info(message: => String, t: Throwable): Unit = {
- logger.info(message.toString, t)
- }
-
- def warn(message: => String): Unit = {
- logger.warn(message.toString)
- }
-
- def warn(message: => String, t: Throwable): Unit = {
- logger.warn(message.toString, t)
- }
-
- def error(message: => String, t: Throwable): Unit = {
- logger.error(message.toString, t)
- }
-
- def error(message: => String): Unit = {
- logger.error(message.toString)
- }
+/*
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.linkis.common.utils
+
+import org.slf4j.LoggerFactory
+
+
+trait Logging {
+
+ protected lazy implicit val logger = LoggerFactory.getLogger(getClass)
+
+ def trace(message: => String) = {
+ if (logger.isTraceEnabled) {
+ logger.trace(message)
+ }
+ }
+
+ def debug(message: => String): Unit = {
+ if (logger.isDebugEnabled) {
+ logger.debug(message)
+ }
+ }
+
+ def info(message: => String): Unit = {
+ if (logger.isInfoEnabled) {
+ logger.info(message)
+ }
+ }
+
+ def info(message: => String, t: Throwable): Unit = {
+ logger.info(message, t)
+ }
+
+ def warn(message: => String): Unit = {
+ logger.warn(message)
+ }
+
+ def warn(message: => String, t: Throwable): Unit = {
+ logger.warn(message, t)
+ }
+
+ def error(message: => String, t: Throwable): Unit = {
+ logger.error(message, t)
+ }
+
+ def error(message: => String): Unit = {
+ logger.error(message)
+ }
}
\ No newline at end of file
diff --git a/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/ShutdownUtils.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/ShutdownUtils.scala
index fba4433..1c0dacf 100644
--- a/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/ShutdownUtils.scala
+++ b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/ShutdownUtils.scala
@@ -37,7 +37,7 @@
def addShutdownHook(shutdownRunner: ShutdownRunner): Unit =
shutdownRunners synchronized shutdownRunners += shutdownRunner
- private val signals = Array("TERM", "HUP", "INT").map(new Signal(_))
+ private val signals = Array("TERM", "HUP", "INT").map(signal => Utils.tryQuietly(new Signal(signal))).filter(_ != null)
private val signalHandler = new SignalHandler {
override def handle(signal: Signal): Unit = {
val hooks = shutdownRunners.sortBy(_.order).toArray.map{
diff --git a/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/Utils.scala b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/Utils.scala
index 5c765eb..5e6b448 100644
--- a/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/Utils.scala
+++ b/linkis-commons/linkis-common/src/main/scala/com/webank/wedatasphere/linkis/common/utils/Utils.scala
@@ -47,7 +47,7 @@
System.exit(-1)
throw e
case er: Error =>
- error("Throw error", er.getCause)
+ error("Throw error", er)
throw er
case t => catchOp(t)
}
@@ -155,7 +155,7 @@
}
val defaultScheduler: ScheduledThreadPoolExecutor = {
- val scheduler = new ScheduledThreadPoolExecutor(20, threadFactory("BDP-Default-Scheduler-Thread-", true))
+ val scheduler = new ScheduledThreadPoolExecutor(20, threadFactory("Linkis-Default-Scheduler-Thread-", true))
scheduler.setMaximumPoolSize(20)
scheduler.setKeepAliveTime(5, TimeUnit.MINUTES)
scheduler
diff --git a/linkis-commons/linkis-hadoop-common/pom.xml b/linkis-commons/linkis-hadoop-common/pom.xml
index 0b1f8bc..a74f161 100644
--- a/linkis-commons/linkis-hadoop-common/pom.xml
+++ b/linkis-commons/linkis-hadoop-common/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
@@ -162,7 +162,19 @@
</exclusion>
<exclusion>
<groupId>org.codehaus.jackson</groupId>
- <artifactId>*</artifactId>
+ <artifactId>jackson-jaxrs</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.codehaus.jackson</groupId>
+ <artifactId>jackson-xc</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.codehaus.jackson</groupId>
+ <artifactId>jackson-core-asl</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.codehaus.jackson</groupId>
+ <artifactId>jackson-mapper-asl</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
diff --git a/linkis-commons/linkis-hadoop-common/src/main/scala/com/webank/wedatasphere/linkis/hadoop/common/conf/HadoopConf.scala b/linkis-commons/linkis-hadoop-common/src/main/scala/com/webank/wedatasphere/linkis/hadoop/common/conf/HadoopConf.scala
index 2cd9154..6ef0e32 100644
--- a/linkis-commons/linkis-hadoop-common/src/main/scala/com/webank/wedatasphere/linkis/hadoop/common/conf/HadoopConf.scala
+++ b/linkis-commons/linkis-hadoop-common/src/main/scala/com/webank/wedatasphere/linkis/hadoop/common/conf/HadoopConf.scala
@@ -13,7 +13,7 @@
package com.webank.wedatasphere.linkis.hadoop.common.conf
-import com.webank.wedatasphere.linkis.common.conf.CommonVars
+import com.webank.wedatasphere.linkis.common.conf.{CommonVars, TimeType}
object HadoopConf {
@@ -32,4 +32,9 @@
val HADOOP_EXTERNAL_CONF_DIR_PREFIX = CommonVars("wds.linkis.hadoop.external.conf.dir.prefix", "/appcom/config/external-conf/hadoop")
+ val HDFS_ENABLE_CACHE = CommonVars("wds.linkis.hadoop.hdfs.cache.enable", false).getValue
+
+ val HDFS_ENABLE_CACHE_IDLE_TIME = CommonVars("wds.linkis.hadoop.hdfs.cache.idle.time", 3*60*1000).getValue
+
+ val HDFS_ENABLE_CACHE_MAX_TIME = CommonVars("wds.linkis.hadoop.hdfs.cache.max.time", new TimeType("12h")).getValue.toLong
}
diff --git a/linkis-commons/linkis-hadoop-common/src/main/scala/com/webank/wedatasphere/linkis/hadoop/common/entity/HDFSFileSystemContainer.scala b/linkis-commons/linkis-hadoop-common/src/main/scala/com/webank/wedatasphere/linkis/hadoop/common/entity/HDFSFileSystemContainer.scala
new file mode 100644
index 0000000..c3109d1
--- /dev/null
+++ b/linkis-commons/linkis-hadoop-common/src/main/scala/com/webank/wedatasphere/linkis/hadoop/common/entity/HDFSFileSystemContainer.scala
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2019 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package com.webank.wedatasphere.linkis.hadoop.common.entity
+
+import com.webank.wedatasphere.linkis.hadoop.common.conf.HadoopConf
+import org.apache.hadoop.fs.FileSystem
+
+class HDFSFileSystemContainer(fs: FileSystem, user: String) {
+
+ private var lastAccessTime: Long = System.currentTimeMillis()
+
+ private var count: Int = 0
+
+ def getFileSystem: FileSystem = this.fs
+
+ def getUser: String = this.user
+
+ def getLastAccessTime: Long = this.lastAccessTime
+
+ def updateLastAccessTime: Unit = {
+ this.lastAccessTime = System.currentTimeMillis()
+ }
+
+ def addAccessCount(): Unit = {
+ count = count + 1
+ }
+
+ def minusAccessCount(): Unit = count = count -1
+
+ def canRemove(): Boolean = {
+ val currentTime = System.currentTimeMillis()
+ val idleTime = currentTime - this.lastAccessTime
+ idleTime > HadoopConf.HDFS_ENABLE_CACHE_MAX_TIME || (System.currentTimeMillis() - this.lastAccessTime > HadoopConf.HDFS_ENABLE_CACHE_IDLE_TIME) && count <= 0
+ }
+
+}
diff --git a/linkis-commons/linkis-hadoop-common/src/main/scala/com/webank/wedatasphere/linkis/hadoop/common/utils/HDFSUtils.scala b/linkis-commons/linkis-hadoop-common/src/main/scala/com/webank/wedatasphere/linkis/hadoop/common/utils/HDFSUtils.scala
index 4d3ffae..4760f16 100644
--- a/linkis-commons/linkis-hadoop-common/src/main/scala/com/webank/wedatasphere/linkis/hadoop/common/utils/HDFSUtils.scala
+++ b/linkis-commons/linkis-hadoop-common/src/main/scala/com/webank/wedatasphere/linkis/hadoop/common/utils/HDFSUtils.scala
@@ -16,17 +16,45 @@
import java.io.File
import java.nio.file.Paths
import java.security.PrivilegedExceptionAction
+import java.util.concurrent.{ConcurrentHashMap, TimeUnit}
+import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
import com.webank.wedatasphere.linkis.hadoop.common.conf.HadoopConf
import com.webank.wedatasphere.linkis.hadoop.common.conf.HadoopConf.{hadoopConfDir, _}
+import com.webank.wedatasphere.linkis.hadoop.common.entity.HDFSFileSystemContainer
+import org.apache.commons.io.IOUtils
import org.apache.commons.lang.StringUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.hadoop.security.UserGroupInformation
+import scala.collection.JavaConverters._
-object HDFSUtils {
+object HDFSUtils extends Logging {
+ private val fileSystemCache: java.util.Map[String, HDFSFileSystemContainer] = new java.util.HashMap[String, HDFSFileSystemContainer]()
+
+ private val LOCKER_SUFFIX = "_HDFS"
+
+ if (HadoopConf.HDFS_ENABLE_CACHE) {
+ info("HDFS Cache enabled ")
+ Utils.defaultScheduler.scheduleAtFixedRate(new Runnable {
+ override def run(): Unit = Utils.tryAndWarn {
+ fileSystemCache.values().asScala.filter { hdfsFileSystemContainer =>
+ hdfsFileSystemContainer.canRemove() && StringUtils.isNotBlank(hdfsFileSystemContainer.getUser)
+ }.foreach { hdfsFileSystemContainer =>
+ val locker = hdfsFileSystemContainer.getUser + LOCKER_SUFFIX
+ locker.intern() synchronized {
+ if (hdfsFileSystemContainer.canRemove()) {
+ fileSystemCache.remove(hdfsFileSystemContainer.getUser)
+ IOUtils.closeQuietly(hdfsFileSystemContainer.getFileSystem)
+ info(s"user${hdfsFileSystemContainer.getUser} to remove hdfsFileSystemContainer,because hdfsFileSystemContainer can remove")
+ }
+ }
+ }
+ }
+ }, 3 * 60 * 1000, 60 * 1000, TimeUnit.MILLISECONDS)
+ }
def getConfiguration(user: String): Configuration = getConfiguration(user, hadoopConfDir)
@@ -65,14 +93,46 @@
def getHDFSUserFileSystem(userName: String): FileSystem = getHDFSUserFileSystem(userName, getConfiguration(userName))
- def getHDFSUserFileSystem(userName: String, conf: org.apache.hadoop.conf.Configuration): FileSystem =
- getUserGroupInformation(userName)
- .doAs(new PrivilegedExceptionAction[FileSystem]{
+ def getHDFSUserFileSystem(userName: String, conf: org.apache.hadoop.conf.Configuration): FileSystem = if (HadoopConf.HDFS_ENABLE_CACHE) {
+ val locker = userName + LOCKER_SUFFIX
+ locker.intern().synchronized {
+ val hdfsFileSystemContainer = if (fileSystemCache.containsKey(userName)) {
+ fileSystemCache.get(userName)
+ } else {
+ val newHDFSFileSystemContainer = new HDFSFileSystemContainer(createFileSystem(userName, conf), userName)
+ fileSystemCache.put(userName, newHDFSFileSystemContainer)
+ newHDFSFileSystemContainer
+ }
+ hdfsFileSystemContainer.addAccessCount()
+ hdfsFileSystemContainer.updateLastAccessTime
+ hdfsFileSystemContainer.getFileSystem
+ }
+ } else {
+ createFileSystem(userName, conf)
+ }
+
+
+ def createFileSystem(userName: String, conf: org.apache.hadoop.conf.Configuration): FileSystem =
+ getUserGroupInformation(userName)
+ .doAs(new PrivilegedExceptionAction[FileSystem] {
def run = FileSystem.get(conf)
})
- def getUserGroupInformation(userName: String): UserGroupInformation ={
- if(KERBEROS_ENABLE.getValue) {
- val path = new File(KEYTAB_FILE.getValue , userName + ".keytab").getPath
+
+ def closeHDFSFIleSystem(fileSystem: FileSystem, userName: String): Unit = if (null != fileSystem && StringUtils.isNotBlank(userName)) {
+ if (HadoopConf.HDFS_ENABLE_CACHE) {
+ val hdfsFileSystemContainer = fileSystemCache.get(userName)
+ if (null != hdfsFileSystemContainer) {
+ val locker = userName + LOCKER_SUFFIX
+ locker synchronized hdfsFileSystemContainer.minusAccessCount()
+ }
+ } else {
+ fileSystem.close()
+ }
+ }
+
+ def getUserGroupInformation(userName: String): UserGroupInformation = {
+ if (KERBEROS_ENABLE.getValue) {
+ val path = new File(KEYTAB_FILE.getValue, userName + ".keytab").getPath
val user = getKerberosUser(userName)
UserGroupInformation.setConfiguration(getConfiguration(userName))
UserGroupInformation.loginUserFromKeytabAndReturnUGI(user, path)
@@ -83,7 +143,7 @@
def getKerberosUser(userName: String): String = {
var user = userName
- if(KEYTAB_HOST_ENABLED.getValue){
+ if (KEYTAB_HOST_ENABLED.getValue) {
user = user + "/" + KEYTAB_HOST.getValue
}
user
diff --git a/linkis-commons/linkis-httpclient/pom.xml b/linkis-commons/linkis-httpclient/pom.xml
index d077b85..e451558 100644
--- a/linkis-commons/linkis-httpclient/pom.xml
+++ b/linkis-commons/linkis-httpclient/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/AbstractHttpClient.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/AbstractHttpClient.scala
index def7421..97c5b6a 100644
--- a/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/AbstractHttpClient.scala
+++ b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/AbstractHttpClient.scala
@@ -199,6 +199,7 @@
upload.inputStreams.foreach { case (k, v) =>
builder.addBinaryBody(k, v, ContentType.create("multipart/form-data"), k)
}
+ upload.binaryBodies.foreach(binaryBody => builder.addBinaryBody(binaryBody.parameterName, binaryBody.inputStream, binaryBody.contentType, binaryBody.fileName))
upload match {
case get: GetAction => get.getParameters.
retain((k, v) => v != null && k != null).
diff --git a/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/UploadAction.scala b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/UploadAction.scala
index a1ddb70..badf261 100644
--- a/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/UploadAction.scala
+++ b/linkis-commons/linkis-httpclient/src/main/scala/com/webank/wedatasphere/linkis/httpclient/request/UploadAction.scala
@@ -18,6 +18,8 @@
import java.util
+import org.apache.http.entity.ContentType
+
import scala.tools.nsc.interpreter.InputStream
@@ -27,7 +29,7 @@
* The file to be uploaded, the key is the parameter name, and the value is the file path.
* 需要上传的文件,key为参数名,value为文件路径
*/
- val files: util.Map[String, String]
+ @Deprecated val files: util.Map[String, String]
/**
* The inputStream that needs to be uploaded, the key is the parameter name, and the value is the input stream.
* 需要上传的输入流,key为参数名,value为输入流
@@ -37,7 +39,20 @@
* The inputStream that needs to be uploaded, the key is the parameter name, and the value is the fileName of inputStream.
* 需要上传的输入流,key为参数名,value为输入流的文件名
*/
- def inputStreamNames: util.Map[String, String] = new util.HashMap[String, String]()
+ @Deprecated def inputStreamNames: util.Map[String, String] = new util.HashMap[String, String]()
+ def binaryBodies: util.List[BinaryBody] = new util.ArrayList[BinaryBody](0)
def user: Option[String] = Option(getUser)
}
+
+case class BinaryBody(parameterName: String, inputStream: InputStream, fileName: String, contentType: ContentType)
+object BinaryBody {
+ def apply(parameterName: String, inputStream: InputStream, fileName: String, contentType: String): BinaryBody =
+ new BinaryBody(parameterName, inputStream, fileName, ContentType.create(contentType))
+
+ def apply(parameterName: String, inputStream: InputStream): BinaryBody =
+ new BinaryBody(parameterName, inputStream, null, ContentType.DEFAULT_BINARY)
+
+ def apply(parameterName: String, inputStream: InputStream, fileName: String): BinaryBody =
+ new BinaryBody(parameterName, inputStream, fileName, ContentType.DEFAULT_BINARY)
+}
\ No newline at end of file
diff --git a/linkis-commons/linkis-message-scheduler/pom.xml b/linkis-commons/linkis-message-scheduler/pom.xml
index 1d11019..b6f17e9 100644
--- a/linkis-commons/linkis-message-scheduler/pom.xml
+++ b/linkis-commons/linkis-message-scheduler/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-commons/linkis-module/pom.xml b/linkis-commons/linkis-module/pom.xml
index eb45db3..a7bf7d4 100644
--- a/linkis-commons/linkis-module/pom.xml
+++ b/linkis-commons/linkis-module/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-commons/linkis-mybatis/pom.xml b/linkis-commons/linkis-mybatis/pom.xml
index bc92c39..891b7b8 100644
--- a/linkis-commons/linkis-mybatis/pom.xml
+++ b/linkis-commons/linkis-mybatis/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-commons/linkis-protocol/pom.xml b/linkis-commons/linkis-protocol/pom.xml
index 81f23d5..6e324b9 100644
--- a/linkis-commons/linkis-protocol/pom.xml
+++ b/linkis-commons/linkis-protocol/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/constants/TaskConstant.java b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/constants/TaskConstant.java
index cb1875d..257344b 100644
--- a/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/constants/TaskConstant.java
+++ b/linkis-commons/linkis-protocol/src/main/java/com/webank/wedatasphere/linkis/protocol/constants/TaskConstant.java
@@ -63,5 +63,6 @@
String EXECUTION_CONTENT = "executionContent";
String CODE = "code";
+ String REQUEST_IP = "requestIP";
}
diff --git a/linkis-commons/linkis-rpc/pom.xml b/linkis-commons/linkis-rpc/pom.xml
index 6b55c8b..ad62484 100644
--- a/linkis-commons/linkis-rpc/pom.xml
+++ b/linkis-commons/linkis-rpc/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-commons/linkis-rpc/src/main/scala/com/webank/wedatasphere/linkis/rpc/interceptor/common/CacheableRPCInterceptor.scala b/linkis-commons/linkis-rpc/src/main/scala/com/webank/wedatasphere/linkis/rpc/interceptor/common/CacheableRPCInterceptor.scala
index 90a8b3a..c0e36b8 100644
--- a/linkis-commons/linkis-rpc/src/main/scala/com/webank/wedatasphere/linkis/rpc/interceptor/common/CacheableRPCInterceptor.scala
+++ b/linkis-commons/linkis-rpc/src/main/scala/com/webank/wedatasphere/linkis/rpc/interceptor/common/CacheableRPCInterceptor.scala
@@ -16,13 +16,17 @@
package com.webank.wedatasphere.linkis.rpc.interceptor.common
-import java.util.concurrent.TimeUnit
+import java.util.concurrent.{Callable, TimeUnit}
import com.google.common.cache.{Cache, CacheBuilder, RemovalListener, RemovalNotification}
-import com.webank.wedatasphere.linkis.common.utils.Logging
+import com.webank.wedatasphere.linkis.common.exception.WarnException
+import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
+import com.webank.wedatasphere.linkis.protocol.CacheableProtocol
import com.webank.wedatasphere.linkis.rpc.interceptor.{RPCInterceptor, RPCInterceptorChain, RPCInterceptorExchange}
import org.springframework.stereotype.Component
+import org.springframework.util.StringUtils
+import scala.tools.scalap.scalax.util.StringUtil
@Component
class CacheableRPCInterceptor extends RPCInterceptor with Logging{
@@ -38,18 +42,33 @@
override val order: Int = 10
override def intercept(interceptorExchange: RPCInterceptorExchange, chain: RPCInterceptorChain): Any = interceptorExchange.getProtocol match {
-// case cacheable: CacheableProtocol =>
-// guavaCache.get(cacheable.toString, new Callable[Any] {
-// override def call(): Any = {
-// val returnMsg = chain.handle(interceptorExchange)
-// returnMsg match {
-// case warn: WarnException =>
-// throw warn
-// case _ =>
-// returnMsg
-// }
-// }
-// })
+ case cacheable: CacheableProtocol =>
+ guavaCache.get(cacheable.toString, new Callable[Any] {
+ override def call(): Any = {
+ val returnMsg = chain.handle(interceptorExchange)
+ returnMsg match {
+ case warn: WarnException =>
+ throw warn
+ case _ =>
+ returnMsg
+ }
+ }
+ })
case _ => chain.handle(interceptorExchange)
}
+
+ def removeCache(cacheKey: String): Boolean ={
+ Utils.tryCatch{
+ if(!StringUtils.isEmpty(cacheKey)){
+ guavaCache.invalidate(cacheKey)
+ }
+ true
+ }{
+ case exception: Exception => {
+ warn(s"Failed to clean RPC cache, cache key:${cacheKey}", exception)
+ false
+ }
+ }
+ }
+
}
diff --git a/linkis-commons/linkis-scheduler/pom.xml b/linkis-commons/linkis-scheduler/pom.xml
index 698e6cb..4a21506 100644
--- a/linkis-commons/linkis-scheduler/pom.xml
+++ b/linkis-commons/linkis-scheduler/pom.xml
@@ -19,7 +19,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<artifactId>linkis-scheduler</artifactId>
diff --git a/linkis-commons/linkis-storage/pom.xml b/linkis-commons/linkis-storage/pom.xml
index 7c59eb8..b88fe52 100644
--- a/linkis-commons/linkis-storage/pom.xml
+++ b/linkis-commons/linkis-storage/pom.xml
@@ -18,7 +18,7 @@
<modelVersion>4.0.0</modelVersion>
<parent>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<groupId>com.webank.wedatasphere.linkis</groupId>
<artifactId>linkis</artifactId>
</parent>
diff --git a/linkis-commons/linkis-storage/src/main/java/com/webank/wedatasphere/linkis/storage/exception/StorageErrorCode.java b/linkis-commons/linkis-storage/src/main/java/com/webank/wedatasphere/linkis/storage/exception/StorageErrorCode.java
new file mode 100644
index 0000000..825277d
--- /dev/null
+++ b/linkis-commons/linkis-storage/src/main/java/com/webank/wedatasphere/linkis/storage/exception/StorageErrorCode.java
@@ -0,0 +1,37 @@
+package com.webank.wedatasphere.linkis.storage.exception;
+
+public enum StorageErrorCode {
+
+ /**
+ *
+ */
+ FS_NOT_INIT(53001,"please init first")
+ ;
+
+
+ StorageErrorCode(int errorCode, String message) {
+ this.code = errorCode;
+ this.message = message;
+ }
+
+ private int code;
+
+ private String message;
+
+ public int getCode() {
+ return code;
+ }
+
+ public void setCode(int code) {
+ this.code = code;
+ }
+
+ public String getMessage() {
+ return message;
+ }
+
+ public void setMessage(String message) {
+ this.message = message;
+ }
+
+}
diff --git a/linkis-commons/linkis-storage/src/main/java/com/webank/wedatasphere/linkis/storage/factory/impl/BuildLocalFileSystem.java b/linkis-commons/linkis-storage/src/main/java/com/webank/wedatasphere/linkis/storage/factory/impl/BuildLocalFileSystem.java
index 09baacb..71e2bd0 100644
--- a/linkis-commons/linkis-storage/src/main/java/com/webank/wedatasphere/linkis/storage/factory/impl/BuildLocalFileSystem.java
+++ b/linkis-commons/linkis-storage/src/main/java/com/webank/wedatasphere/linkis/storage/factory/impl/BuildLocalFileSystem.java
@@ -19,6 +19,7 @@
import com.webank.wedatasphere.linkis.storage.fs.impl.LocalFileSystem;
import com.webank.wedatasphere.linkis.storage.io.IOMethodInterceptorCreator$;
import com.webank.wedatasphere.linkis.storage.utils.StorageConfiguration;
+import com.webank.wedatasphere.linkis.storage.utils.StorageUtils;
import net.sf.cglib.proxy.Enhancer;
@@ -27,7 +28,7 @@
@Override
public Fs getFs(String user, String proxyUser){
FileSystem fs = null;
- if ((Boolean) StorageConfiguration.ENABLE_IO_PROXY().getValue()) {
+ if (StorageUtils.isIOProxy()) {
if(user.equals(proxyUser)){
if((Boolean) StorageConfiguration.IS_SHARE_NODE().getValue()){
fs = new LocalFileSystem();
diff --git a/linkis-commons/linkis-storage/src/main/java/com/webank/wedatasphere/linkis/storage/fs/impl/HDFSFileSystem.java b/linkis-commons/linkis-storage/src/main/java/com/webank/wedatasphere/linkis/storage/fs/impl/HDFSFileSystem.java
index 0a6d690..1f51175 100644
--- a/linkis-commons/linkis-storage/src/main/java/com/webank/wedatasphere/linkis/storage/fs/impl/HDFSFileSystem.java
+++ b/linkis-commons/linkis-storage/src/main/java/com/webank/wedatasphere/linkis/storage/fs/impl/HDFSFileSystem.java
@@ -298,7 +298,7 @@
@Override
public void close() throws IOException {
if (null != fs) {
- fs.close();
+ HDFSUtils.closeHDFSFIleSystem(fs, user);
} else {
logger.warn("FS was null, cannot close.");
}
diff --git a/linkis-commons/linkis-storage/src/main/java/com/webank/wedatasphere/linkis/storage/fs/impl/LocalFileSystem.java b/linkis-commons/linkis-storage/src/main/java/com/webank/wedatasphere/linkis/storage/fs/impl/LocalFileSystem.java
index 36157d9..5970b69 100644
--- a/linkis-commons/linkis-storage/src/main/java/com/webank/wedatasphere/linkis/storage/fs/impl/LocalFileSystem.java
+++ b/linkis-commons/linkis-storage/src/main/java/com/webank/wedatasphere/linkis/storage/fs/impl/LocalFileSystem.java
@@ -22,6 +22,7 @@
import com.webank.wedatasphere.linkis.storage.exception.StorageWarnException;
import com.webank.wedatasphere.linkis.storage.fs.FileSystem;
import com.webank.wedatasphere.linkis.storage.utils.StorageConfiguration;
+import com.webank.wedatasphere.linkis.storage.utils.StorageUtils;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
@@ -76,6 +77,10 @@
@Override
public boolean setOwner(FsPath dest, String user, String group) throws IOException {
+ if (!StorageUtils.isIOProxy()){
+ LOG.info("io not proxy, setOwner skip");
+ return true;
+ }
if (user != null) {
setOwner(dest, user);
}
@@ -88,6 +93,10 @@
@Override
public boolean setOwner(FsPath dest, String user) throws IOException {
+ if (!StorageUtils.isIOProxy()){
+ LOG.info("io not proxy, setOwner skip");
+ return true;
+ }
UserPrincipalLookupService lookupService =
FileSystems.getDefault().getUserPrincipalLookupService();
PosixFileAttributeView view =
@@ -100,6 +109,10 @@
@Override
public boolean setGroup(FsPath dest, String group) throws IOException {
+ if (!StorageUtils.isIOProxy()){
+ LOG.info("io not proxy, setGroup skip");
+ return true;
+ }
UserPrincipalLookupService lookupService =
FileSystems.getDefault().getUserPrincipalLookupService();
PosixFileAttributeView view =
@@ -169,6 +182,10 @@
@Override
public boolean setPermission(FsPath dest, String permission) throws IOException {
+ if (!StorageUtils.isIOProxy()){
+ LOG.info("io not proxy, setPermission skip");
+ return true;
+ }
String path = dest.getPath();
if(StringUtils.isNumeric(permission)) {
permission = FsPath.permissionFormatted(permission);
diff --git a/linkis-commons/linkis-storage/src/main/scala/com/webank/wedatasphere/linkis/storage/source/FileSource.scala b/linkis-commons/linkis-storage/src/main/scala/com/webank/wedatasphere/linkis/storage/source/FileSource.scala
index e18e284..5e0df6e 100644
--- a/linkis-commons/linkis-storage/src/main/scala/com/webank/wedatasphere/linkis/storage/source/FileSource.scala
+++ b/linkis-commons/linkis-storage/src/main/scala/com/webank/wedatasphere/linkis/storage/source/FileSource.scala
@@ -52,8 +52,7 @@
object FileSource {
- private val fileType = Array("dolphin", "sql", "scala", "py", "hql", "python", "out", "log", "text", "sh", "jdbc", "ngql", "psql")
-
+ private val fileType = Array("dolphin", "sql", "scala", "py", "hql", "python", "out", "log", "text", "sh", "jdbc", "ngql", "psql", "fql")
private val suffixPredicate = (path: String, suffix: String) => path.endsWith(s".$suffix")
def isResultSet(path: String): Boolean = {
diff --git a/linkis-commons/linkis-storage/src/main/scala/com/webank/wedatasphere/linkis/storage/source/FileSplit.scala b/linkis-commons/linkis-storage/src/main/scala/com/webank/wedatasphere/linkis/storage/source/FileSplit.scala
index 9acb83b..e9d2b2d 100644
--- a/linkis-commons/linkis-storage/src/main/scala/com/webank/wedatasphere/linkis/storage/source/FileSplit.scala
+++ b/linkis-commons/linkis-storage/src/main/scala/com/webank/wedatasphere/linkis/storage/source/FileSplit.scala
@@ -65,11 +65,13 @@
def `while`[M](m: MetaData => M, r: Record => Unit): M = {
val metaData = fsReader.getMetaData
val t = m(metaData)
+ if (pageTrigger) {
+ fsReader.skip(start)
+ }
+ count = start
while (fsReader.hasNext && ifContinueRead) {
- if (ifStartRead) {
r(shuffler(fsReader.getRecord))
totalLine += 1
- }
count += 1
}
t
diff --git a/linkis-commons/linkis-storage/src/main/scala/com/webank/wedatasphere/linkis/storage/utils/StorageUtils.scala b/linkis-commons/linkis-storage/src/main/scala/com/webank/wedatasphere/linkis/storage/utils/StorageUtils.scala
index fa61ad5..f50c4e7 100644
--- a/linkis-commons/linkis-storage/src/main/scala/com/webank/wedatasphere/linkis/storage/utils/StorageUtils.scala
+++ b/linkis-commons/linkis-storage/src/main/scala/com/webank/wedatasphere/linkis/storage/utils/StorageUtils.scala
@@ -204,4 +204,8 @@
}
}
+ def isIOProxy(): Boolean = {
+ StorageConfiguration.ENABLE_IO_PROXY.getValue
+ }
+
}
diff --git a/linkis-commons/pom.xml b/linkis-commons/pom.xml
index ca535fa..3b2aa2c 100644
--- a/linkis-commons/pom.xml
+++ b/linkis-commons/pom.xml
@@ -5,7 +5,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/pom.xml b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/pom.xml
index d5c9f80..a17970c 100644
--- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/pom.xml
+++ b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-application/pom.xml
@@ -23,7 +23,7 @@
<parent>
<groupId>com.webank.wedatasphere.linkis</groupId>
<artifactId>linkis-cli</artifactId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../pom.xml</relativePath>
</parent>
<artifactId>linkis-cli-application</artifactId>
diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/pom.xml b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/pom.xml
index b4370a4..39c1072 100644
--- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/pom.xml
+++ b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-common/pom.xml
@@ -23,7 +23,7 @@
<parent>
<groupId>com.webank.wedatasphere.linkis</groupId>
<artifactId>linkis-cli</artifactId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../pom.xml</relativePath>
</parent>
<artifactId>linkis-cli-common</artifactId>
diff --git a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/pom.xml b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/pom.xml
index a1fd918..350c345 100644
--- a/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/pom.xml
+++ b/linkis-computation-governance/linkis-client/linkis-cli/linkis-cli-core/pom.xml
@@ -23,7 +23,7 @@
<parent>
<groupId>com.webank.wedatasphere.linkis</groupId>
<artifactId>linkis-cli</artifactId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../pom.xml</relativePath>
</parent>
<artifactId>linkis-cli-core</artifactId>
diff --git a/linkis-computation-governance/linkis-client/linkis-cli/pom.xml b/linkis-computation-governance/linkis-client/linkis-cli/pom.xml
index 926d776..6cbcda2 100644
--- a/linkis-computation-governance/linkis-client/linkis-cli/pom.xml
+++ b/linkis-computation-governance/linkis-client/linkis-cli/pom.xml
@@ -23,7 +23,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<artifactId>linkis-cli</artifactId>
<packaging>pom</packaging>
diff --git a/linkis-computation-governance/linkis-client/linkis-computation-client/pom.xml b/linkis-computation-governance/linkis-client/linkis-computation-client/pom.xml
index e398738..c3a41bf 100644
--- a/linkis-computation-governance/linkis-client/linkis-computation-client/pom.xml
+++ b/linkis-computation-governance/linkis-client/linkis-computation-client/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<artifactId>linkis-computation-client</artifactId>
diff --git a/linkis-computation-governance/linkis-computation-governance-common/pom.xml b/linkis-computation-governance/linkis-computation-governance-common/pom.xml
index 3c8bb40..3d53067 100644
--- a/linkis-computation-governance/linkis-computation-governance-common/pom.xml
+++ b/linkis-computation-governance/linkis-computation-governance-common/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/com/webank/wedatasphere/linkis/governance/common/entity/job/JobRequest.java b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/com/webank/wedatasphere/linkis/governance/common/entity/job/JobRequest.java
index 198ecde..42cb4ad 100644
--- a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/com/webank/wedatasphere/linkis/governance/common/entity/job/JobRequest.java
+++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/com/webank/wedatasphere/linkis/governance/common/entity/job/JobRequest.java
@@ -185,4 +185,17 @@
public void setMetrics(Map<String, Object> metrics) {
this.metrics = metrics;
}
+
+ @Override
+ public String toString() {
+ return "JobRequest{" +
+ "id=" + id +
+ ", reqId='" + reqId + '\'' +
+ ", submitUser='" + submitUser + '\'' +
+ ", executeUser='" + executeUser + '\'' +
+ ", labels=" + labels +
+ ", params=" + params +
+ ", status=" + status +
+ '}';
+ }
}
diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/com/webank/wedatasphere/linkis/governance/common/entity/task/ResponsePersist.java b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/com/webank/wedatasphere/linkis/governance/common/entity/task/ResponsePersist.java
index 21c926e..ab3e1c7 100644
--- a/linkis-computation-governance/linkis-computation-governance-common/src/main/java/com/webank/wedatasphere/linkis/governance/common/entity/task/ResponsePersist.java
+++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/java/com/webank/wedatasphere/linkis/governance/common/entity/task/ResponsePersist.java
@@ -31,7 +31,7 @@
private String msg;
private Map<String, Object> data;
- public Integer getStatus() {
+ public Integer getStatus() {
return status;
}
diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/com/webank/wedatasphere/linkis/governance/common/paser/CodeParser.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/com/webank/wedatasphere/linkis/governance/common/paser/CodeParser.scala
index 38bd5af..eaabbc1 100644
--- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/com/webank/wedatasphere/linkis/governance/common/paser/CodeParser.scala
+++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/com/webank/wedatasphere/linkis/governance/common/paser/CodeParser.scala
@@ -233,14 +233,19 @@
if (code.contains("limit")) code = code.substring(code.lastIndexOf("limit")).trim
else if (code.contains("LIMIT")) code = code.substring(code.lastIndexOf("LIMIT")).trim.toLowerCase
else return true
- val hasLimit = code.matches("limit\\s+\\d+\\s*;?")
- if (hasLimit) {
- if (code.indexOf(";") > 0) code = code.substring(5, code.length - 1).trim
- else code = code.substring(5).trim
- val limitNum = code.toInt
- if (limitNum > defaultLimit) throw new IllegalArgumentException("We at most allowed to limit " + defaultLimit + ", but your SQL has been over the max rows.")
- }
- !hasLimit
+ code.matches("limit\\s+\\d+\\s*;?")
+ /**
+ * com.webank.wedatasphere.linkis.entrance.interceptor.impl.Explain has convert and check the limit for code,
+ * so the check here is redundancy
+ */
+ /* val hasLimit = code.matches("limit\\s+\\d+\\s*;?")
+ if (hasLimit) {
+ if (code.indexOf(";") > 0) code = code.substring(5, code.length - 1).trim
+ else code = code.substring(5).trim
+ val limitNum = code.toInt
+ if (limitNum > defaultLimit) throw new IllegalArgumentException("We at most allowed to limit " + defaultLimit + ", but your SQL has been over the max rows.")
+ }
+ !hasLimit*/
}
}
diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/com/webank/wedatasphere/linkis/governance/common/protocol/conf/RequestQueryGlobalConfig.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/com/webank/wedatasphere/linkis/governance/common/protocol/conf/RequestQueryGlobalConfig.scala
index 2f328c6..2978b08 100644
--- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/com/webank/wedatasphere/linkis/governance/common/protocol/conf/RequestQueryGlobalConfig.scala
+++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/com/webank/wedatasphere/linkis/governance/common/protocol/conf/RequestQueryGlobalConfig.scala
@@ -23,9 +23,17 @@
trait ConfigProtocol
-case class RequestQueryGlobalConfig(username: String) extends CacheableProtocol with RetryableProtocol with ConfigProtocol with RequestProtocol
+case class RequestQueryGlobalConfig(username: String) extends CacheableProtocol with RetryableProtocol with ConfigProtocol with RequestProtocol {
+ override def toString: String = {
+ RequestQueryGlobalConfig.getClass.getName + "," + username
+ }
+}
-case class RequestQueryEngineConfig(userCreatorLabel: UserCreatorLabel, engineTypeLabel: EngineTypeLabel, filter: String = null) extends CacheableProtocol with RetryableProtocol with ConfigProtocol
+case class RequestQueryEngineConfig(userCreatorLabel: UserCreatorLabel, engineTypeLabel: EngineTypeLabel, filter: String = null) extends CacheableProtocol with RetryableProtocol with ConfigProtocol{
+ override def toString: String = {
+ RequestQueryGlobalConfig.getClass.getName + "," + userCreatorLabel.getStringValue + "," + engineTypeLabel.getStringValue
+ }
+}
case class RequestQueryEngineTypeDefault(engineTypeLabel: EngineTypeLabel) extends CacheableProtocol with RetryableProtocol with ConfigProtocol
diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/com/webank/wedatasphere/linkis/governance/common/protocol/job/JobDetailReqProcotol.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/com/webank/wedatasphere/linkis/governance/common/protocol/job/JobDetailReqProcotol.scala
index 39432c7..5266c9a 100644
--- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/com/webank/wedatasphere/linkis/governance/common/protocol/job/JobDetailReqProcotol.scala
+++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/com/webank/wedatasphere/linkis/governance/common/protocol/job/JobDetailReqProcotol.scala
@@ -1,9 +1,11 @@
package com.webank.wedatasphere.linkis.governance.common.protocol.job
+import java.util
+
import com.webank.wedatasphere.linkis.governance.common.entity.job.{SubJobDetail, SubJobInfo}
import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol
-
import java.util.Date
+
import scala.beans.BeanProperty
@@ -13,6 +15,8 @@
case class JobDetailReqUpdate(jobInfo: SubJobInfo) extends JobDetailReq
+case class JobDetailReqBatchUpdate(jobInfo: util.ArrayList[SubJobInfo]) extends JobDetailReq
+
case class JobDetailReqQuery(jobReq: SubJobDetail) extends JobDetailReq
case class JobDetailReqReadAll(jobReq: SubJobDetail) extends JobDetailReq
diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/com/webank/wedatasphere/linkis/governance/common/protocol/job/JobReqProcotol.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/com/webank/wedatasphere/linkis/governance/common/protocol/job/JobReqProcotol.scala
index 7972ff6..2ffc78a 100644
--- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/com/webank/wedatasphere/linkis/governance/common/protocol/job/JobReqProcotol.scala
+++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/com/webank/wedatasphere/linkis/governance/common/protocol/job/JobReqProcotol.scala
@@ -1,9 +1,11 @@
package com.webank.wedatasphere.linkis.governance.common.protocol.job
+import java.util
+
import com.webank.wedatasphere.linkis.governance.common.entity.job.JobRequest
import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol
-
import java.util.Date
+
import scala.beans.BeanProperty
@@ -13,6 +15,8 @@
case class JobReqUpdate(jobReq: JobRequest) extends JobReq
+case class JobReqBatchUpdate(jobReq: util.ArrayList[JobRequest]) extends JobReq
+
case class JobReqQuery(jobReq: JobRequest) extends JobReq
case class JobReqReadAll(jobReq: JobRequest) extends JobReq
diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/com/webank/wedatasphere/linkis/governance/common/protocol/task/RequestTask.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/com/webank/wedatasphere/linkis/governance/common/protocol/task/RequestTask.scala
index 5e6c64e..afd2550 100644
--- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/com/webank/wedatasphere/linkis/governance/common/protocol/task/RequestTask.scala
+++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/com/webank/wedatasphere/linkis/governance/common/protocol/task/RequestTask.scala
@@ -31,6 +31,7 @@
def data(key: String, value: Object): Unit
def getLabels: util.List[Label[_]]
def setLabels(labels: util.List[Label[_]])
+ def getSourceID(): String
}
object RequestTask {
private val header = "#rt_"
@@ -43,6 +44,8 @@
private var labels: util.List[Label[_]] = new util.ArrayList[Label[_]](4)
+ private var sourceID: String = _
+
override def getCode: String = code
override def setCode(code: String): Unit = this.code = code
@@ -73,6 +76,10 @@
}
override def toString = s"RequestTaskExecute(code=${getCodeByLimit()}, lock=$lock, properties=$properties, labels=$labels)"
+
+ override def getSourceID(): String = sourceID
+
+ def setSourceID(sourceID: String): Unit = this.sourceID = sourceID
}
trait TaskState extends RequestProtocol {}
diff --git a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/com/webank/wedatasphere/linkis/governance/common/utils/OnceExecutorContentUtils.scala b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/com/webank/wedatasphere/linkis/governance/common/utils/OnceExecutorContentUtils.scala
index cfe19b0..8962b41 100644
--- a/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/com/webank/wedatasphere/linkis/governance/common/utils/OnceExecutorContentUtils.scala
+++ b/linkis-computation-governance/linkis-computation-governance-common/src/main/scala/com/webank/wedatasphere/linkis/governance/common/utils/OnceExecutorContentUtils.scala
@@ -15,8 +15,9 @@
def resourceToValue(bmlResource: BmlResource): String = {
val resourceId = bmlResource.getResourceId
- if(resourceId.length > LEN) throw new GovernanceErrorException(40108, s"Invalid resourceId $resourceId, it is too length.")
- val len = if(resourceId.length < LEN) "0" * (LEN - resourceId.length) + resourceId.length
+ val resourceIdLength = resourceId.length.toString.length
+ if(resourceIdLength > LEN) throw new GovernanceErrorException(40108, s"Invalid resourceId $resourceId, it is too length.")
+ val len = if(resourceIdLength < LEN) "0" * (LEN - resourceIdLength) + resourceId.length
HEADER + len + resourceId + bmlResource.getVersion
}
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-linux-launch/pom.xml b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-linux-launch/pom.xml
index df87795..53f8e19 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-linux-launch/pom.xml
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-linux-launch/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/pom.xml b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/pom.xml
index fa010d9..6b13d41 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/pom.xml
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-core/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/pom.xml b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/pom.xml
index 3af6896..8078b08 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/pom.xml
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/com/webank/wedatasphere/linkis/ecm/server/hook/JarUDFLoadECMHook.scala b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/com/webank/wedatasphere/linkis/ecm/server/hook/JarUDFLoadECMHook.scala
index d672a8d..7e13daf 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/com/webank/wedatasphere/linkis/ecm/server/hook/JarUDFLoadECMHook.scala
+++ b/linkis-computation-governance/linkis-engineconn-manager/linkis-engineconn-manager-server/src/main/scala/com/webank/wedatasphere/linkis/ecm/server/hook/JarUDFLoadECMHook.scala
@@ -30,7 +30,7 @@
request match {
case pel: ProcessEngineConnLaunchRequest =>
info("start loading UDFs")
- val udfInfos = UDFClient.getUdfInfos(request.user).filter{ info => info.getUdfType == 0 && info.getExpire == false && StringUtils.isNotBlank(info.getPath) && info.getLoad == true }
+ val udfInfos = UDFClient.getUdfInfos(request.user,"udf").filter{ info => info.getUdfType == 0 && info.getExpire == false && StringUtils.isNotBlank(info.getPath) && info.getLoad == true }
udfInfos.foreach{ udfInfo =>
LaunchConstants.addPathToClassPath(pel.environment, udfInfo.getPath)
}
diff --git a/linkis-computation-governance/linkis-engineconn-manager/pom.xml b/linkis-computation-governance/linkis-engineconn-manager/pom.xml
index 8248c5d..50c68e8 100644
--- a/linkis-computation-governance/linkis-engineconn-manager/pom.xml
+++ b/linkis-computation-governance/linkis-engineconn-manager/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/pom.xml b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/pom.xml
index 7a044d8..673e901 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/pom.xml
+++ b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/once/executor/OnceExecutor.scala b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/once/executor/OnceExecutor.scala
index e505029..12cad18 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/once/executor/OnceExecutor.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/once/executor/OnceExecutor.scala
@@ -30,13 +30,12 @@
import com.webank.wedatasphere.linkis.manager.common.entity.enumeration.NodeStatus
import com.webank.wedatasphere.linkis.manager.label.builder.factory.LabelBuilderFactoryContext
import com.webank.wedatasphere.linkis.manager.label.entity.{JobLabel, Label}
-import com.webank.wedatasphere.linkis.scheduler.executer.{AsynReturnExecuteResponse, ExecuteResponse}
+import com.webank.wedatasphere.linkis.scheduler.executer.{AsynReturnExecuteResponse, ErrorExecuteResponse, ExecuteResponse, SuccessExecuteResponse}
import org.apache.commons.io.IOUtils
import org.apache.commons.lang.StringUtils
-import scala.collection.mutable.ArrayBuffer
import scala.collection.convert.wrapAsScala._
-
+import scala.collection.mutable.ArrayBuffer
trait OnceExecutor extends ExecutableExecutor[ExecuteResponse] with LabelExecutor with Logging {
@@ -121,7 +120,7 @@
protected def waitToRunning(): Unit
- def waitForComplete(): Unit = notifyListeners synchronized wait()
+ def waitForComplete(): Unit = this synchronized wait()
def getResponse: ExecuteResponse = response
@@ -129,7 +128,11 @@
override protected def onStatusChanged(fromStatus: NodeStatus, toStatus: NodeStatus): Unit = {
if(NodeStatus.isCompleted(toStatus)) {
- Utils.tryFinally(notifyListeners.foreach(_(getResponse)))(notifyListeners synchronized notifyAll)
+ if(response == null) toStatus match {
+ case NodeStatus.Success => response = SuccessExecuteResponse()
+ case _ => response = ErrorExecuteResponse("Unknown reason.", null)
+ }
+ Utils.tryFinally(notifyListeners.foreach(_(getResponse)))(this synchronized notifyAll)
}
super.onStatusChanged(fromStatus, toStatus)
}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/once/executor/creation/OnceExecutorFactory.scala b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/once/executor/creation/OnceExecutorFactory.scala
index e53dcec..9cab0a5 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/once/executor/creation/OnceExecutorFactory.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/once/executor/creation/OnceExecutorFactory.scala
@@ -19,10 +19,9 @@
import com.webank.wedatasphere.linkis.engineconn.common.creation.EngineCreationContext
import com.webank.wedatasphere.linkis.engineconn.common.engineconn.EngineConn
-import com.webank.wedatasphere.linkis.engineconn.core.EngineConnObject
import com.webank.wedatasphere.linkis.engineconn.core.creation.AbstractCodeLanguageLabelExecutorFactory
import com.webank.wedatasphere.linkis.engineconn.once.executor.OnceExecutor
-import com.webank.wedatasphere.linkis.manager.engineplugin.common.creation.{MultiExecutorEngineConnFactory, SingleExecutorEngineConnFactory}
+import com.webank.wedatasphere.linkis.engineconn.once.executor.execution.OnceEngineConnExecution
import com.webank.wedatasphere.linkis.manager.label.entity.Label
import com.webank.wedatasphere.linkis.manager.label.entity.engine.EngineConnMode._
import com.webank.wedatasphere.linkis.manager.label.entity.engine.EngineConnModeLabel
@@ -36,17 +35,10 @@
labels: Array[Label[_]]): OnceExecutor
override def canCreate(labels: Array[Label[_]]): Boolean =
- super.canCreate(labels) || (labels.exists {
+ super.canCreate(labels) && labels.exists {
case engineConnModeLabel: EngineConnModeLabel =>
val mode: EngineConnMode = engineConnModeLabel.getEngineConnMode
- Array(Once, Computation_With_Once, Once_With_Cluster).contains(mode)
- case _ => false
- } && onlyOneOnceExecutorFactory())
-
- private def onlyOneOnceExecutorFactory(): Boolean = EngineConnObject.getEngineConnPlugin.getEngineConnFactory match {
- case _: SingleExecutorEngineConnFactory with OnceExecutorFactory => true
- case engineConnFactory: MultiExecutorEngineConnFactory =>
- engineConnFactory.getExecutorFactories.count(_.isInstanceOf[OnceExecutorFactory]) == 1
+ OnceEngineConnExecution.getSupportedEngineConnModes.contains(mode)
case _ => false
}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/once/executor/execution/OnceEngineConnExecution.scala b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/once/executor/execution/OnceEngineConnExecution.scala
index a448cd8..77a1f89 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/once/executor/execution/OnceEngineConnExecution.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-once-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/once/executor/execution/OnceEngineConnExecution.scala
@@ -81,8 +81,7 @@
case _ => false
}
- override protected def getSupportedEngineConnModes: Array[EngineConnMode] =
- Array(Once, Computation_With_Once, Once_With_Cluster)
+ override protected def getSupportedEngineConnModes: Array[EngineConnMode] = OnceEngineConnExecution.getSupportedEngineConnModes
override protected def getReturnEngineConnModes: Array[EngineConnMode] = Array(Once)
@@ -91,6 +90,11 @@
*
* @return
*/
- override def getOrder: Int = 2
+ override def getOrder: Int = 100
+
+}
+object OnceEngineConnExecution {
+
+ def getSupportedEngineConnModes: Array[EngineConnMode] = Array(Once, Computation_With_Once, Once_With_Cluster)
}
\ No newline at end of file
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-streaming-engineconn/pom.xml b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-streaming-engineconn/pom.xml
index cc70711..fe54fa1 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-streaming-engineconn/pom.xml
+++ b/linkis-computation-governance/linkis-engineconn/linkis-clustered-engineconn/linkis-streaming-engineconn/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/pom.xml b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/pom.xml
index d2889cc..4437af1 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/pom.xml
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/conf/ComputationExecutorConf.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/conf/ComputationExecutorConf.scala
index 82c69bc..ca92fa4 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/conf/ComputationExecutorConf.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/conf/ComputationExecutorConf.scala
@@ -29,5 +29,7 @@
val ENGINE_MAX_TASK_EXECUTE_NUM = CommonVars("wds.linkis.engineconn.max.task.execute.num", 0)
- val ENGINE_PROGRESS_FETCH_INTERVAL = CommonVars("wds.linkis.engineconn.progresss.fetch.interval-in-seconds", 3)
+ val ENGINE_PROGRESS_FETCH_INTERVAL = CommonVars("wds.linkis.engineconn.progresss.fetch.interval-in-seconds", 5)
+
+ val UDF_LOAD_FAILED_IGNORE = CommonVars("wds.linkis.engineconn.udf.load.ignore", true)
}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/creation/ComputationExecutorFactory.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/creation/ComputationExecutorFactory.scala
index bd47936..64e47fb 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/creation/ComputationExecutorFactory.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/creation/ComputationExecutorFactory.scala
@@ -20,9 +20,11 @@
import com.webank.wedatasphere.linkis.engineconn.common.creation.EngineCreationContext
import com.webank.wedatasphere.linkis.engineconn.common.engineconn.EngineConn
-import com.webank.wedatasphere.linkis.engineconn.computation.executor.execute.ComputationExecutor
+import com.webank.wedatasphere.linkis.engineconn.computation.executor.execute.{ComputationEngineConnExecution, ComputationExecutor}
import com.webank.wedatasphere.linkis.engineconn.core.creation.AbstractCodeLanguageLabelExecutorFactory
import com.webank.wedatasphere.linkis.manager.label.entity.Label
+import com.webank.wedatasphere.linkis.manager.label.entity.engine.EngineConnMode.EngineConnMode
+import com.webank.wedatasphere.linkis.manager.label.entity.engine.EngineConnModeLabel
trait ComputationExecutorFactory extends AbstractCodeLanguageLabelExecutorFactory {
@@ -32,4 +34,16 @@
engineConn: EngineConn,
labels: Array[Label[_]]): ComputationExecutor
+ override def canCreate(labels: Array[Label[_]]): Boolean = {
+ val canCreateIt = super.canCreate(labels)
+ if(!canCreateIt) return false
+ val existsEngineConnMode = labels.exists(_.isInstanceOf[EngineConnModeLabel])
+ if(!existsEngineConnMode) return true
+ labels.exists {
+ case engineConnModeLabel: EngineConnModeLabel =>
+ val mode: EngineConnMode = engineConnModeLabel.getEngineConnMode
+ ComputationEngineConnExecution.getSupportedEngineConnModes.contains(mode)
+ case _ => false
+ }
+ }
}
\ No newline at end of file
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/creation/ComputationExecutorManager.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/creation/ComputationExecutorManager.scala
index 3d90a2a..dcfe355 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/creation/ComputationExecutorManager.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/creation/ComputationExecutorManager.scala
@@ -20,14 +20,12 @@
import java.util.concurrent.TimeUnit
-import com.webank.wedatasphere.linkis.DataWorkCloudApplication
import com.webank.wedatasphere.linkis.common.exception.ErrorException
import com.webank.wedatasphere.linkis.common.utils.{Logging, RetryHandler, Utils}
import com.webank.wedatasphere.linkis.engineconn.common.conf.EngineConnConf
import com.webank.wedatasphere.linkis.engineconn.computation.executor.execute.ComputationExecutor
import com.webank.wedatasphere.linkis.engineconn.core.engineconn.EngineConnManager
import com.webank.wedatasphere.linkis.engineconn.core.executor.{ExecutorManager, LabelExecutorManager, LabelExecutorManagerImpl}
-import com.webank.wedatasphere.linkis.engineconn.executor.conf.EngineConnExecutorConfiguration
import com.webank.wedatasphere.linkis.manager.label.entity.Label
import com.webank.wedatasphere.linkis.manager.label.entity.engine.CodeLanguageLabel
@@ -38,16 +36,13 @@
def getDefaultExecutor: ComputationExecutor
- override def getReportExecutor: ComputationExecutor
+ //override def getReportExecutor: ComputationExecutor
}
object ComputationExecutorManager {
- DataWorkCloudApplication.setProperty(EngineConnExecutorConfiguration.EXECUTOR_MANAGER_CLASS.key,
- "com.webank.wedatasphere.linkis.engineconn.computation.executor.creation.ComputationExecutorManagerImpl")
-
- private val executorManager = ExecutorManager.getInstance match {
+ private lazy val executorManager = ExecutorManager.getInstance match {
case manager: ComputationExecutorManager => manager
}
@@ -63,9 +58,8 @@
if(defaultExecutor != null) return defaultExecutor
synchronized {
if (null == defaultExecutor || defaultExecutor.isClosed) {
- val engineConn = EngineConnManager.getEngineConnManager.getEngineConn
- if (null == engineConn) {
- Utils.waitUntil(() => null != engineConn, Duration.apply(EngineConnConf.ENGINE_CONN_CREATION_WAIT_TIME.getValue.toLong, TimeUnit.MILLISECONDS))
+ if (null == EngineConnManager.getEngineConnManager.getEngineConn) {
+ Utils.waitUntil(() => null != EngineConnManager.getEngineConnManager.getEngineConn, Duration.apply(EngineConnConf.ENGINE_CONN_CREATION_WAIT_TIME.getValue.toLong, TimeUnit.MILLISECONDS))
error(s"Create default executor failed, engineConn not ready after ${EngineConnConf.ENGINE_CONN_CREATION_WAIT_TIME.getValue.toString}.")
return null
}
@@ -79,10 +73,10 @@
}
}
- override def getReportExecutor: ComputationExecutor = if(getExecutors.isEmpty) getDefaultExecutor
+ /*override def getReportExecutor: ComputationExecutor = if(getExecutors.isEmpty) getDefaultExecutor
else getExecutors.maxBy {
- case computationExecutor: ComputationExecutor => computationExecutor.getLastActivityTime
- }.asInstanceOf[ComputationExecutor]
+ case computationExecutor: ComputationExecutor => computationExecutor.getStatus.ordinal()
+ }.asInstanceOf[ComputationExecutor]*/
override protected def getLabelKey(labels: Array[Label[_]]): String = {
labels.foreach {
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/cs/CSEnginePreExecuteHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/cs/CSEnginePreExecuteHook.scala
index 4a2fe80..0edb185 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/cs/CSEnginePreExecuteHook.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/cs/CSEnginePreExecuteHook.scala
@@ -19,6 +19,7 @@
import com.webank.wedatasphere.linkis.engineconn.common.creation.EngineCreationContext
import com.webank.wedatasphere.linkis.engineconn.computation.executor.execute.EngineExecutionContext
import com.webank.wedatasphere.linkis.engineconn.computation.executor.hook.ComputationExecutorHook
+import com.webank.wedatasphere.linkis.engineconn.computation.executor.utlis.ComputationEngineConstant
class CSEnginePreExecuteHook extends ComputationExecutorHook with Logging {
@@ -27,6 +28,8 @@
override def getHookName: String = "ContextServicePreHook"
+ override def getOrder(): Int = ComputationEngineConstant.CS_HOOK_ORDER
+
override def beforeExecutorExecute(engineExecutionContext: EngineExecutionContext, engineCreationContext: EngineCreationContext, code: String): String = {
val props = engineExecutionContext.getProperties
if (null != props && props.containsKey(CSCommonUtils.CONTEXT_ID_STR)) {
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/execute/ComputationEngineConnExecution.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/execute/ComputationEngineConnExecution.scala
index ed09a2c..917948f 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/execute/ComputationEngineConnExecution.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/execute/ComputationEngineConnExecution.scala
@@ -16,17 +16,21 @@
package com.webank.wedatasphere.linkis.engineconn.computation.executor.execute
+import com.webank.wedatasphere.linkis.DataWorkCloudApplication
import com.webank.wedatasphere.linkis.engineconn.common.creation.EngineCreationContext
import com.webank.wedatasphere.linkis.engineconn.common.engineconn.EngineConn
+import com.webank.wedatasphere.linkis.engineconn.common.execution.EngineConnExecution
import com.webank.wedatasphere.linkis.engineconn.core.execution.AbstractEngineConnExecution
+import com.webank.wedatasphere.linkis.engineconn.executor.conf.EngineConnExecutorConfiguration
import com.webank.wedatasphere.linkis.engineconn.executor.entity.Executor
import com.webank.wedatasphere.linkis.manager.label.entity.engine.EngineConnMode._
+import com.webank.wedatasphere.linkis.manager.label.entity.engine.EngineConnModeLabel
class ComputationEngineConnExecution extends AbstractEngineConnExecution {
override protected def getSupportedEngineConnModes: Array[EngineConnMode] =
- Array(Computation, Computation_With_Once)
+ ComputationEngineConnExecution.getSupportedEngineConnModes
override protected def doExecution(executor: Executor,
engineCreationContext: EngineCreationContext,
@@ -39,5 +43,33 @@
*
* @return
*/
- override def getOrder: Int = 10
+ override def getOrder: Int = 200
}
+
+object ComputationEngineConnExecution {
+
+ def getSupportedEngineConnModes: Array[EngineConnMode] = Array(Computation, Computation_With_Once)
+
+}
+
+import scala.collection.convert.decorateAsScala._
+class ComputationExecutorManagerEngineConnExecution extends EngineConnExecution {
+ override def execute(engineCreationContext: EngineCreationContext, engineConn: EngineConn): Unit = {
+ var shouldSet = true
+ engineCreationContext.getLabels().asScala.foreach {
+ case engineConnModeLabel: EngineConnModeLabel =>
+ val mode = toEngineConnMode(engineConnModeLabel.getEngineConnMode)
+ shouldSet = ComputationEngineConnExecution.getSupportedEngineConnModes.contains(mode)
+ case _ => false
+ }
+ if(shouldSet) DataWorkCloudApplication.setProperty(EngineConnExecutorConfiguration.EXECUTOR_MANAGER_CLASS.key,
+ "com.webank.wedatasphere.linkis.engineconn.computation.executor.creation.ComputationExecutorManagerImpl")
+}
+
+ /**
+ * The smallest got the first execution opportunity.
+ *
+ * @return
+ */
+ override def getOrder: Int = 5
+}
\ No newline at end of file
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala
index 7462cb1..6abf2a7 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/execute/ComputationExecutor.scala
@@ -17,9 +17,11 @@
package com.webank.wedatasphere.linkis.engineconn.computation.executor.execute
import java.util.concurrent.TimeUnit
+import java.util.concurrent.atomic.AtomicInteger
import com.google.common.cache.{Cache, CacheBuilder}
import com.webank.wedatasphere.linkis.DataWorkCloudApplication
+import com.webank.wedatasphere.linkis.common.log.LogUtils
import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
import com.webank.wedatasphere.linkis.engineconn.acessible.executor.entity.AccessibleExecutor
import com.webank.wedatasphere.linkis.engineconn.acessible.executor.listener.event.TaskStatusChangedEvent
@@ -28,6 +30,7 @@
import com.webank.wedatasphere.linkis.engineconn.computation.executor.creation.ComputationExecutorManager
import com.webank.wedatasphere.linkis.engineconn.computation.executor.entity.EngineConnTask
import com.webank.wedatasphere.linkis.engineconn.computation.executor.hook.ComputationExecutorHook
+import com.webank.wedatasphere.linkis.engineconn.core.EngineConnObject
import com.webank.wedatasphere.linkis.engineconn.core.engineconn.EngineConnManager
import com.webank.wedatasphere.linkis.engineconn.core.executor.ExecutorManager
import com.webank.wedatasphere.linkis.engineconn.executor.entity.{LabelExecutor, ResourceExecutor}
@@ -55,20 +58,24 @@
private var engineInitialized: Boolean = false
+ private var internalExecute: Boolean = false
+
private var codeParser: Option[CodeParser] = None
- private var runningTasks: Int = 0
+ private var runningTasks: Count = new Count
- private var pendingTasks: Int = 0
+ private var pendingTasks: Count = new Count
- private var succeedTasks: Int = 0
+ private var succeedTasks: Count = new Count
- private var failedTasks: Int = 0
+ private var failedTasks: Count = new Count
private var lastTask: EngineConnTask = _
private val MAX_TASK_EXECUTE_NUM = ComputationExecutorConf.ENGINE_MAX_TASK_EXECUTE_NUM.getValue
+ protected def setInitialized(inited: Boolean = true): Unit = this.engineInitialized = inited
+
final override def tryReady(): Boolean = {
transition(NodeStatus.Unlock)
if (!engineInitialized) {
@@ -80,12 +87,12 @@
override def init(): Unit = {
- info(s"Executor($getId) is inited.")
+ setInitialized()
+ info(s"Executor($getId) inited : ${isEngineInitialized}")
}
def tryShutdown(): Boolean = {
- this.ensureAvailable(transition(NodeStatus.ShuttingDown))
- close()
+ transition(NodeStatus.ShuttingDown)
true
}
@@ -96,16 +103,18 @@
override def trySucceed(): Boolean = false
- def getSucceedNum: Int = succeedTasks
+ def getSucceedNum: Int = succeedTasks.getCount()
- def getFailedNum: Int = failedTasks
+ def getFailedNum: Int = failedTasks.getCount()
- def getRunningTask: Int = runningTasks
+ def getRunningTask: Int = runningTasks.getCount()
- protected def getExecutorConcurrentInfo: EngineConcurrentInfo = EngineConcurrentInfo(runningTasks, pendingTasks, succeedTasks, failedTasks)
+ protected def getExecutorConcurrentInfo: EngineConcurrentInfo = EngineConcurrentInfo(getRunningTask, 0, getSucceedNum, getFailedNum)
def isEngineInitialized: Boolean = engineInitialized
+ def isInternalExecute: Boolean = internalExecute
+
protected def callback(): Unit = {}
override def close(): Unit = {
@@ -119,35 +128,30 @@
// override def getName: String = ComputationExecutorConf.DEFAULT_COMPUTATION_NAME
- protected def ensureOp[A](f: => A): A = if (!this.engineInitialized)
+ protected def ensureOp[A](f: => A): A = if (!isEngineInitialized)
f
else ensureIdle(f)
protected def beforeExecute(engineConnTask: EngineConnTask): Unit = {}
protected def afterExecute(engineConnTask: EngineConnTask, executeResponse: ExecuteResponse): Unit = {
- val executorNumber = succeedTasks + failedTasks
- if (MAX_TASK_EXECUTE_NUM > 0 && runningTasks == 0 && executorNumber > MAX_TASK_EXECUTE_NUM) {
+ val executorNumber = getSucceedNum + getFailedNum
+ if (MAX_TASK_EXECUTE_NUM > 0 && runningTasks.getCount() == 0 && executorNumber > MAX_TASK_EXECUTE_NUM) {
error(s"Task has reached max execute number $MAX_TASK_EXECUTE_NUM, now tryShutdown. ")
- ComputationExecutorManager.getInstance.getReportExecutor.tryShutdown()
+ ExecutorManager.getInstance.getReportExecutor.tryShutdown()
}
}
- def execute(engineConnTask: EngineConnTask): ExecuteResponse = {
- updateLastActivityTime()
- beforeExecute(engineConnTask)
- runningTasks += 1
- taskCache.put(engineConnTask.getTaskId, engineConnTask)
- lastTask = engineConnTask
-
+ def toExecuteTask(engineConnTask: EngineConnTask, internalExecute: Boolean = false): ExecuteResponse = {
+ runningTasks.increase()
+ this.internalExecute = internalExecute
+ Utils.tryFinally{
transformTaskStatus(engineConnTask, ExecutionNodeStatus.Running)
-
- ensureOp {
val engineExecutionContext = createEngineExecutionContext(engineConnTask)
var hookedCode = engineConnTask.getCode
Utils.tryCatch {
- val engineCreationContext = EngineConnManager.getEngineConnManager.getEngineConn.getEngineCreationContext
+ val engineCreationContext = EngineConnObject.getEngineCreationContext
ComputationExecutorHook.getComputationExecutorHooks.foreach(hook => {
hookedCode = hook.beforeExecutorExecute(engineExecutionContext, engineCreationContext, hookedCode)
})
@@ -157,8 +161,8 @@
} else {
info(s"hooked after code: $hookedCode ")
}
- val localPath = System.getenv(EngineConnConf.ENGINE_CONN_LOCAL_LOG_DIRS_KEY.getValue)
- engineExecutionContext.appendStdout(s"EngineConn local log path : ${DataWorkCloudApplication.getServiceInstance.toString} $localPath")
+ val localPath = EngineConnConf.getLogDir
+ engineExecutionContext.appendStdout(LogUtils.generateInfo(s"EngineConn local log path: ${DataWorkCloudApplication.getServiceInstance.toString} $localPath"))
var response: ExecuteResponse = null
val incomplete = new StringBuilder
val codes = Utils.tryCatch(getCodeParser.map(_.parse(hookedCode)).getOrElse(Array(hookedCode))) { e =>
@@ -176,11 +180,11 @@
) {
t => ErrorExecuteResponse(ExceptionUtils.getRootCauseMessage(t), t)
}
- info(s"Finished to execute task ${engineConnTask.getTaskId}")
+ //info(s"Finished to execute task ${engineConnTask.getTaskId}")
incomplete ++= code
response match {
case e: ErrorExecuteResponse =>
- failedTasks += 1
+ failedTasks.increase()
error("execute code failed!", e.t)
return response
case SuccessExecuteResponse() =>
@@ -199,27 +203,45 @@
Utils.tryCatch(engineExecutionContext.close()) {
t =>
response = ErrorExecuteResponse("send resultSet to entrance failed!", t)
- failedTasks += 1
+ failedTasks.increase()
}
- runningTasks -= 1
- lastTask = null
+
+
response = response match {
case _: OutputExecuteResponse =>
- succeedTasks += 1
+ succeedTasks.increase()
transformTaskStatus(engineConnTask, ExecutionNodeStatus.Succeed)
SuccessExecuteResponse()
case s: SuccessExecuteResponse =>
- succeedTasks += 1
+ succeedTasks.increase()
transformTaskStatus(engineConnTask, ExecutionNodeStatus.Succeed)
s
case _ => response
}
- Utils.tryAndWarn(afterExecute(engineConnTask, response))
response
+ }{
+ runningTasks.decrease()
+ this.internalExecute = false
+ }
+ }
+
+
+
+ def execute(engineConnTask: EngineConnTask): ExecuteResponse = {
+ info(s"start to execute task ${engineConnTask.getTaskId}")
+ updateLastActivityTime()
+ beforeExecute(engineConnTask)
+ taskCache.put(engineConnTask.getTaskId, engineConnTask)
+ lastTask = engineConnTask
+ val response = ensureOp {
+ toExecuteTask(engineConnTask)
}
-
- }
+ Utils.tryAndWarn(afterExecute(engineConnTask, response))
+ info(s"Finished to execute task ${engineConnTask.getTaskId}")
+ lastTask = null
+ response
+ }
def setCodeParser(codeParser: CodeParser): Unit = this.codeParser = Some(codeParser)
@@ -256,7 +278,7 @@
def killTask(taskId: String): Unit = {
Utils.tryAndWarn {
- val task = taskCache.getIfPresent(taskId)
+ val task = getTaskById(taskId)
if (null != task) {
task.setStatus(ExecutionNodeStatus.Cancelled)
transformTaskStatus(task, ExecutionNodeStatus.Cancelled)
@@ -281,7 +303,7 @@
case _ =>
error(s"Task status change error. task: $task, newStatus : $newStatus.")
}
- if (oriStatus != newStatus) {
+ if (oriStatus != newStatus && !isInternalExecute) {
listenerBusContext.getEngineConnSyncListenerBus.postToAll(TaskStatusChangedEvent(task.getTaskId, oriStatus, newStatus))
}
}
@@ -290,4 +312,23 @@
taskCache.getIfPresent(taskId)
}
+ def clearTaskCache(taskId: String): Unit = {
+ taskCache.invalidate(taskId)
+ }
+}
+
+class Count{
+
+ val count = new AtomicInteger(0)
+
+ def getCount(): Int = {
+ count.get()
+ }
+
+ def increase() : Unit = {
+ count.incrementAndGet()
+ }
+ def decrease(): Unit = {
+ count.decrementAndGet()
+ }
}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/execute/ConcurrentComputationExecutor.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/execute/ConcurrentComputationExecutor.scala
index e51df44..d3bb8b5 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/execute/ConcurrentComputationExecutor.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/execute/ConcurrentComputationExecutor.scala
@@ -34,6 +34,7 @@
transition(NodeStatus.Busy)
}
}
+ info(s"engineConnTask(${engineConnTask.getTaskId}) running task is ($getRunningTask) ")
val response = super.execute(engineConnTask)
if (getStatus == NodeStatus.Busy && getConcurrentLimit > getRunningTask) synchronized {
if (getStatus == NodeStatus.Busy && getConcurrentLimit > getRunningTask) {
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala
index 17e7d93..e2f00f5 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/execute/EngineExecutionContext.scala
@@ -43,6 +43,7 @@
private var defaultResultSetWriter: ResultSetWriter[_ <: MetaData, _ <: Record] = _
private var resultSize = 0
+ private var enableResultsetMetaWithTableName = false
private val properties: java.util.Map[String, Object] = new util.HashMap[String, Object]()
@@ -57,7 +58,7 @@
def setCurrentParagraph(currentParagraph: Int): Unit = this.currentParagraph = currentParagraph
- def pushProgress(progress: Float, progressInfo: Array[JobProgressInfo]): Unit = {
+ def pushProgress(progress: Float, progressInfo: Array[JobProgressInfo]): Unit = if (! executor.isInternalExecute){
val listenerBus = getEngineSyncListenerBus
getJobId.foreach(jId => {
listenerBus.postToAll(TaskProgressUpdateEvent(jId, progress, progressInfo))
@@ -135,7 +136,7 @@
//update by peaceWong 20200402 end
}
- def appendStdout(log: String): Unit = if (!executor.isEngineInitialized) {
+ def appendStdout(log: String): Unit = if (executor.isInternalExecute) {
executor.info(log)
} else {
val listenerBus = getEngineSyncListenerBus
@@ -162,4 +163,7 @@
def getExecutor: Executor = executor
+ def getEnableResultsetMetaWithTableName = enableResultsetMetaWithTableName
+
+ def setEnableResultsetMetaWithTableName(withTableName: Boolean): Unit = this.enableResultsetMetaWithTableName = withTableName
}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/hook/ComputationEngineConnHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/hook/ComputationEngineConnHook.scala
index 5416d0f..3f94bbb 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/hook/ComputationEngineConnHook.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/hook/ComputationEngineConnHook.scala
@@ -19,7 +19,7 @@
import com.webank.wedatasphere.linkis.engineconn.callback.hook.CallbackEngineConnHook
import com.webank.wedatasphere.linkis.engineconn.common.creation.EngineCreationContext
import com.webank.wedatasphere.linkis.engineconn.common.engineconn.EngineConn
-import com.webank.wedatasphere.linkis.engineconn.computation.executor.creation.ComputationExecutorManager
+import com.webank.wedatasphere.linkis.engineconn.core.executor.ExecutorManager
import com.webank.wedatasphere.linkis.manager.common.entity.enumeration.NodeStatus
@@ -32,6 +32,6 @@
engineConn: EngineConn): Unit =
{
super.afterEngineServerStartSuccess(engineCreationContext, engineConn)
- ComputationExecutorManager.getInstance.getReportExecutor.tryReady()
+ ExecutorManager.getInstance.getReportExecutor.tryReady()
}
}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/hook/ComputationExecutorHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/hook/ComputationExecutorHook.scala
index 4c6b534..a3bf610 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/hook/ComputationExecutorHook.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/hook/ComputationExecutorHook.scala
@@ -26,6 +26,8 @@
trait ComputationExecutorHook {
+ def getOrder(): Int = 1
+
def getHookName(): String
def beforeExecutorExecute(engineExecutionContext: EngineExecutionContext, engineCreationContext: EngineCreationContext, codeBeforeHook: String): String = codeBeforeHook
@@ -48,7 +50,7 @@
t: Throwable =>
error(t.getMessage)
}
- hooks.toArray
+ hooks.sortWith((a, b) => a.getOrder() <= b.getOrder()).toArray
}
def getComputationExecutorHooks = computationExecutorHooks
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/hook/UDFLoadEngineConnHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/hook/UDFLoadEngineConnHook.scala
index 4b209fd..748b03e 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/hook/UDFLoadEngineConnHook.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/hook/UDFLoadEngineConnHook.scala
@@ -20,12 +20,13 @@
import java.io.File
-import com.webank.wedatasphere.linkis.common.utils.Logging
+import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
import com.webank.wedatasphere.linkis.engineconn.common.creation.EngineCreationContext
import com.webank.wedatasphere.linkis.engineconn.common.engineconn.EngineConn
import com.webank.wedatasphere.linkis.engineconn.common.hook.EngineConnHook
-import com.webank.wedatasphere.linkis.engineconn.computation.executor.creation.ComputationExecutorManager
+import com.webank.wedatasphere.linkis.engineconn.computation.executor.conf.ComputationExecutorConf
import com.webank.wedatasphere.linkis.engineconn.computation.executor.execute.{ComputationExecutor, EngineExecutionContext}
+import com.webank.wedatasphere.linkis.engineconn.core.executor.ExecutorManager
import com.webank.wedatasphere.linkis.manager.label.entity.Label
import com.webank.wedatasphere.linkis.manager.label.entity.engine.{CodeLanguageLabel, EngineTypeLabel}
import com.webank.wedatasphere.linkis.udf.UDFClient
@@ -61,7 +62,7 @@
case "" =>
case c: String =>
info("Submit udf registration to engine, code: " + c)
- ComputationExecutorManager.getInstance.getExecutorByLabels(labels) match {
+ ExecutorManager.getInstance.getExecutorByLabels(labels) match {
case executor: ComputationExecutor =>
// val task = new CommonEngineConnTask("udf-register-" + UDFLoadEngineConnHook.taskIdGenerator.incrementAndGet(), false)
// task.setCode(c)
@@ -69,7 +70,16 @@
// task.setLabels(labels)
// task.setProperties(Maps.newHashMap())
// executor.execute(task)
- executor.executeLine(new EngineExecutionContext(executor), c)
+ Utils.tryCatch(executor.executeLine(new EngineExecutionContext(executor), c)){
+ case t: Throwable =>
+ if (! ComputationExecutorConf.UDF_LOAD_FAILED_IGNORE.getValue) {
+ Utils.tryQuietly(executor.close())
+ throw t
+ }else {
+ error("Failed to load udf", t)
+ null
+ }
+ }
}
info("executed code: " + c)
}
@@ -102,7 +112,7 @@
protected def getLoadUdfCode(user: String): String = {
info("start loading UDFs")
- val udfInfos = UDFClient.getUdfInfos(user).filter{ info => info.getUdfType == udfType && info.getExpire == false && info.getLoad == true}
+ val udfInfos = UDFClient.getUdfInfos(user, category).filter{ info => info.getUdfType == udfType && info.getExpire == false && info.getLoad == true}
udfInfos.map(constructCode).mkString("\n")
}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/hook/executor/EngineResultsetPrefixExecutorHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/hook/executor/EngineResultsetPrefixExecutorHook.scala
index fd2c95d..3e6a75d 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/hook/executor/EngineResultsetPrefixExecutorHook.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/hook/executor/EngineResultsetPrefixExecutorHook.scala
@@ -22,7 +22,7 @@
import com.webank.wedatasphere.linkis.engineconn.common.creation.EngineCreationContext
import com.webank.wedatasphere.linkis.engineconn.computation.executor.execute.EngineExecutionContext
import com.webank.wedatasphere.linkis.engineconn.computation.executor.hook.ComputationExecutorHook
-import com.webank.wedatasphere.linkis.engineconn.computation.executor.utlis.ComputaionEngineContant.JOB_IN_RUNTIME_MAP_KEY
+import com.webank.wedatasphere.linkis.engineconn.computation.executor.utlis.ComputationEngineConstant.JOB_IN_RUNTIME_MAP_KEY
import com.webank.wedatasphere.linkis.governance.common.utils.GovernanceConstant
import com.webank.wedatasphere.linkis.protocol.utils.TaskUtils
import com.webank.wedatasphere.linkis.server.BDPJettyServerHelper
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/service/TaskExecutionService.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/service/TaskExecutionService.scala
index aa45ed2..c1d4f28 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/service/TaskExecutionService.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/service/TaskExecutionService.scala
@@ -46,5 +46,6 @@
def dealRequestTaskResume(requestTaskResume: RequestTaskResume): Unit
+ def clearCache(taskId: String): Unit
}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala
index 569b1de..99f73a4 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/service/TaskExecutionServiceImpl.scala
@@ -17,7 +17,8 @@
package com.webank.wedatasphere.linkis.engineconn.computation.executor.service
import java.util.concurrent.atomic.AtomicInteger
-import java.util.concurrent.{BlockingQueue, ExecutorService, Future, LinkedBlockingDeque, TimeUnit}
+import java.util.concurrent._
+
import com.google.common.cache.{Cache, CacheBuilder}
import com.webank.wedatasphere.linkis.common.listener.Event
import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
@@ -27,11 +28,11 @@
import com.webank.wedatasphere.linkis.engineconn.acessible.executor.service.LockService
import com.webank.wedatasphere.linkis.engineconn.common.conf.{EngineConnConf, EngineConnConstant}
import com.webank.wedatasphere.linkis.engineconn.computation.executor.conf.ComputationExecutorConf
-import com.webank.wedatasphere.linkis.engineconn.computation.executor.creation.ComputationExecutorManager
import com.webank.wedatasphere.linkis.engineconn.computation.executor.entity.{CommonEngineConnTask, EngineConnTask}
import com.webank.wedatasphere.linkis.engineconn.computation.executor.execute.{ComputationExecutor, ConcurrentComputationExecutor}
import com.webank.wedatasphere.linkis.engineconn.computation.executor.listener.{ResultSetListener, TaskProgressListener, TaskStatusListener}
-import com.webank.wedatasphere.linkis.engineconn.computation.executor.utlis.{ComputaionEngineContant, ComputationEngineUtils}
+import com.webank.wedatasphere.linkis.engineconn.computation.executor.utlis.{ComputationEngineConstant, ComputationEngineUtils}
+import com.webank.wedatasphere.linkis.engineconn.core.executor.ExecutorManager
import com.webank.wedatasphere.linkis.engineconn.executor.listener.ExecutorListenerBusContext
import com.webank.wedatasphere.linkis.engineconn.executor.listener.event.EngineConnSyncEvent
import com.webank.wedatasphere.linkis.governance.common.entity.ExecutionNodeStatus
@@ -51,14 +52,13 @@
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Component
-import java.util
import scala.collection.JavaConverters._
@Component
class TaskExecutionServiceImpl extends TaskExecutionService with Logging with ResultSetListener with LogListener with TaskProgressListener with TaskStatusListener {
- private val executorManager = ComputationExecutorManager.getInstance
+ private lazy val executorManager = ExecutorManager.getInstance
private val taskExecutedNum = new AtomicInteger(0)
private var lastTask: EngineConnTask = _
private var lastTaskFuture: Future[_] = _
@@ -120,14 +120,14 @@
// 获取任务类型和任务代码,运行任务
val taskId: Int = taskExecutedNum.incrementAndGet()
val retryAble: Boolean = {
- val retry = requestTask.getProperties.getOrDefault(ComputaionEngineContant.RETRYABLE_TYPE_NAME, null)
+ val retry = requestTask.getProperties.getOrDefault(ComputationEngineConstant.RETRYABLE_TYPE_NAME, null)
if (null != retry) retry.asInstanceOf[Boolean]
else false
}
val task = new CommonEngineConnTask(String.valueOf(taskId), retryAble)
task.setCode(requestTask.getCode)
task.setProperties(requestTask.getProperties)
- task.data(ComputaionEngineContant.LOCK_TYPE_NAME, requestTask.getLock)
+ task.data(ComputationEngineConstant.LOCK_TYPE_NAME, requestTask.getLock)
task.setStatus(ExecutionNodeStatus.Scheduled)
val labels = requestTask.getLabels.asScala.toArray
task.setLabels(labels)
@@ -176,6 +176,7 @@
computationExecutor.transformTaskStatus(task, ExecutionNodeStatus.Failed)
case _ =>
}
+ clearCache(task.getTaskId)
}
}
lastTask = task
@@ -211,6 +212,7 @@
executor.transformTaskStatus(task, ExecutionNodeStatus.Failed)
case _ => //TODO response maybe lose
}
+ clearCache(task.getTaskId)
}
Thread.sleep(20)
} {
@@ -243,7 +245,7 @@
*/
private def openDaemonForTask(task: EngineConnTask, taskFuture: Future[_], scheduler: ExecutorService): Future[_] = {
scheduler.submit(new Runnable {
- override def run(): Unit = {
+ override def run(): Unit = Utils.tryAndWarn {
val sleepInterval = ComputationExecutorConf.ENGINE_PROGRESS_FETCH_INTERVAL.getValue
while(null != taskFuture && !taskFuture.isDone){
sendToEntrance(task, taskProgress(task.getTaskId))
@@ -257,7 +259,7 @@
if (StringUtils.isBlank(taskID)) return response
val executor = taskIdCache.getIfPresent(taskID)
if (null != executor) {
- val task = executor.getTaskById(taskID)
+ val task = getTaskByTaskId(taskID)
if (null != task) {
if (ExecutionNodeStatus.isCompleted(task.getStatus)) {
response = ResponseTaskProgress(taskID, 1.0f, null)
@@ -325,12 +327,13 @@
@Receiver
override def dealRequestTaskKill(requestTaskKill: RequestTaskKill): Unit = {
+ warn(s"Requested to kill task : ${requestTaskKill.execId}")
val executor = taskIdCache.getIfPresent(requestTaskKill.execId)
if (null != executor) {
executor.killTask(requestTaskKill.execId)
info(s"TaskId : ${requestTaskKill.execId} was killed by user.")
} else {
- error(s"Invalid executor : null for taskId : ${requestTaskKill.execId}")
+ error(s"Kill failed, got invalid executor : null for taskId : ${requestTaskKill.execId}")
}
}
@@ -356,7 +359,7 @@
if (null != task) {
sendToEntrance(task, ResponseTaskLog(logUpdateEvent.taskId, logUpdateEvent.log))
} else {
- error("Task cannot null! logupdateEvent: " + ComputationEngineUtils.GSON.toJson(logUpdateEvent))
+ error("Task cannot null! logupdateEvent: " + logUpdateEvent.taskId)
}
} else if (null != lastTask) {
val executor = executorManager.getReportExecutor
@@ -369,7 +372,7 @@
error("OnLogUpdate error. Invalid ComputationExecutor : " + ComputationEngineUtils.GSON.toJson(executor))
}
} else {
- info(s"Task not ready, log will be dropped : ${BDPJettyServerHelper.gson.toJson(logUpdateEvent)}")
+ info(s"Task not ready, log will be dropped : ${logUpdateEvent.taskId}")
}
}
@@ -414,7 +417,7 @@
info(s"Finished to deal result event ${taskResultCreateEvent.taskId}")
}
- def getTaskByTaskId(taskId: String): EngineConnTask = {
+ private def getTaskByTaskId(taskId: String): EngineConnTask = {
if (StringUtils.isBlank(taskId)) return null
val executor = taskIdCache.getIfPresent(taskId)
if (null != executor) {
@@ -441,4 +444,14 @@
}
+ override def clearCache(taskId: String): Unit = {
+ if (StringUtils.isBlank(taskId)) return
+ Utils.tryAndError {
+ val executor = taskIdCache.getIfPresent(taskId)
+ if (null != executor) {
+ executor.clearTaskCache(taskId)
+ taskIdCache.invalidate(taskId)
+ }
+ }
+ }
}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/utlis/ComputaionEngineContant.scala b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/utlis/ComputationEngineConstant.scala
similarity index 92%
rename from linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/utlis/ComputaionEngineContant.scala
rename to linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/utlis/ComputationEngineConstant.scala
index b34cccc..ac27325 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/utlis/ComputaionEngineContant.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-computation-engineconn/src/main/scala/com/webank/wedatasphere/linkis/engineconn/computation/executor/utlis/ComputationEngineConstant.scala
@@ -17,7 +17,7 @@
package com.webank.wedatasphere.linkis.engineconn.computation.executor.utlis
-object ComputaionEngineContant {
+object ComputationEngineConstant {
def RETRYABLE_TYPE_NAME = "taskRetryable"
@@ -25,4 +25,6 @@
def JOB_IN_RUNTIME_MAP_KEY = "job"
+ def CS_HOOK_ORDER = -1
+
}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/pom.xml b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/pom.xml
index 3fcc49c..3049d41 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/pom.xml
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/com/webank/wedatasphere/linkis/engineconn/common/conf/EngineConnConf.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/com/webank/wedatasphere/linkis/engineconn/common/conf/EngineConnConf.scala
index 7987482..0563d35 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/com/webank/wedatasphere/linkis/engineconn/common/conf/EngineConnConf.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-common/src/main/scala/com/webank/wedatasphere/linkis/engineconn/common/conf/EngineConnConf.scala
@@ -16,7 +16,10 @@
package com.webank.wedatasphere.linkis.engineconn.common.conf
+import java.io.File
+
import com.webank.wedatasphere.linkis.common.conf.{CommonVars, TimeType}
+import org.apache.commons.lang.StringUtils
@@ -39,10 +42,17 @@
val ENGINE_LOCK_REFRESH_TIME = CommonVars("wds.linkis.engine.lock.refresh.time", 1000 * 60 * 3)
- val ENGINE_CONN_LOCAL_PATH_PWD_KEY = CommonVars("wds.linkis.engine.localpath.pwd.key", "PWD")
+ val ENGINE_CONN_LOCAL_PATH_PWD_KEY = CommonVars("wds.linkis.engine.work.home.key", "PWD")
val ENGINE_CONN_LOCAL_LOG_DIRS_KEY = CommonVars("wds.linkis.engine.logs.dir.key", "LOG_DIRS")
- val ENGINE_CONN_CREATION_WAIT_TIME = CommonVars("wds.linkis.engine.connector.init.time", new TimeType("3m"))
+ val ENGINE_CONN_CREATION_WAIT_TIME = CommonVars("wds.linkis.engine.connector.init.time", new TimeType("8m"))
+
+ def getWorkHome: String = System.getenv(ENGINE_CONN_LOCAL_PATH_PWD_KEY.getValue)
+
+ def getLogDir: String = {
+ val logDir = System.getenv(ENGINE_CONN_LOCAL_LOG_DIRS_KEY.getValue)
+ if(StringUtils.isNotEmpty(logDir)) logDir else new File(getWorkHome, "logs").getPath
+ }
}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/pom.xml b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/pom.xml
index 9002611..76227ca 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/pom.xml
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/com/webank/wedatasphere/linkis/engineconn/core/engineconn/EngineConnManager.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/com/webank/wedatasphere/linkis/engineconn/core/engineconn/EngineConnManager.scala
index e104cd5..f62f77d 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/com/webank/wedatasphere/linkis/engineconn/core/engineconn/EngineConnManager.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/com/webank/wedatasphere/linkis/engineconn/core/engineconn/EngineConnManager.scala
@@ -19,6 +19,7 @@
import com.webank.wedatasphere.linkis.engineconn.common.creation.EngineCreationContext
import com.webank.wedatasphere.linkis.engineconn.common.engineconn.EngineConn
import com.webank.wedatasphere.linkis.engineconn.core.EngineConnObject
+import com.webank.wedatasphere.linkis.engineconn.core.exception.{EngineConnErrorCode, EngineConnFatalException}
trait EngineConnManager {
@@ -44,7 +45,12 @@
this.engineConn
}
- override def getEngineConn: EngineConn = this.engineConn
+ override def getEngineConn: EngineConn = {
+ if (null == this.engineConn) {
+ throw new EngineConnFatalException(EngineConnErrorCode.ENGINE_CONN_UN_INIT_CODE, "You need to wait for engine conn to be initialized before starting to call")
+ }
+ this.engineConn
+ }
}
diff --git a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/common/resource/UserTimeoutNodeResource.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/com/webank/wedatasphere/linkis/engineconn/core/exception/EngineConnErrorCode.scala
similarity index 63%
copy from linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/common/resource/UserTimeoutNodeResource.scala
copy to linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/com/webank/wedatasphere/linkis/engineconn/core/exception/EngineConnErrorCode.scala
index 66fd946..fef2004 100644
--- a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/common/resource/UserTimeoutNodeResource.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/com/webank/wedatasphere/linkis/engineconn/core/exception/EngineConnErrorCode.scala
@@ -1,12 +1,9 @@
/*
* Copyright 2019 WeBank
- *
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
* http://www.apache.org/licenses/LICENSE-2.0
- *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -14,17 +11,10 @@
* limitations under the License.
*/
-package com.webank.wedatasphere.linkis.manager.engineplugin.common.resource
+package com.webank.wedatasphere.linkis.engineconn.core.exception
+object EngineConnErrorCode {
+ val ENGINE_CONN_UN_INIT_CODE = 12100
-/*
-/**
- */
-class UserTimeoutNodeResource extends UserNodeResource with TimeoutNodeResource {
- private var timeout: Long = _
-
- override def getTimeout: Long = timeout
-
- override def setTimeout(timeout: Long): Unit = this.timeout = timeout
-}*/
+}
diff --git a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/common/resource/UserTimeoutNodeResource.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/com/webank/wedatasphere/linkis/engineconn/core/exception/EngineConnFatalException.scala
similarity index 63%
rename from linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/common/resource/UserTimeoutNodeResource.scala
rename to linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/com/webank/wedatasphere/linkis/engineconn/core/exception/EngineConnFatalException.scala
index 66fd946..5b90697 100644
--- a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/common/resource/UserTimeoutNodeResource.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/com/webank/wedatasphere/linkis/engineconn/core/exception/EngineConnFatalException.scala
@@ -1,12 +1,9 @@
/*
* Copyright 2019 WeBank
- *
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
* http://www.apache.org/licenses/LICENSE-2.0
- *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -14,17 +11,10 @@
* limitations under the License.
*/
-package com.webank.wedatasphere.linkis.manager.engineplugin.common.resource
+package com.webank.wedatasphere.linkis.engineconn.core.exception
+import com.webank.wedatasphere.linkis.common.exception.FatalException
+class EngineConnFatalException(errorCode: Int, msg: String) extends FatalException(errorCode, msg){
-/*
-/**
- */
-class UserTimeoutNodeResource extends UserNodeResource with TimeoutNodeResource {
- private var timeout: Long = _
-
- override def getTimeout: Long = timeout
-
- override def setTimeout(timeout: Long): Unit = this.timeout = timeout
-}*/
+}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/com/webank/wedatasphere/linkis/engineconn/core/execution/AbstractEngineConnExecution.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/com/webank/wedatasphere/linkis/engineconn/core/execution/AbstractEngineConnExecution.scala
index 74f6475..c5f1432 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/com/webank/wedatasphere/linkis/engineconn/core/execution/AbstractEngineConnExecution.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/com/webank/wedatasphere/linkis/engineconn/core/execution/AbstractEngineConnExecution.scala
@@ -36,6 +36,7 @@
engineCreationContext.getLabels().asScala.exists {
case engineConnModeLabel: EngineConnModeLabel =>
val mode = toEngineConnMode(engineConnModeLabel.getEngineConnMode)
+ info(s"EngineConnMode is ${engineConnModeLabel.getEngineConnMode}.")
getSupportedEngineConnModes.contains(mode)
case _ => false
}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/com/webank/wedatasphere/linkis/engineconn/core/execution/EngineConnExecution.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/com/webank/wedatasphere/linkis/engineconn/core/execution/EngineConnExecution.scala
index b52b33e..722813a 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/com/webank/wedatasphere/linkis/engineconn/core/execution/EngineConnExecution.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/com/webank/wedatasphere/linkis/engineconn/core/execution/EngineConnExecution.scala
@@ -27,13 +27,15 @@
object EngineConnExecution extends Logging {
- private val engineExecutions = initEngineExecutions.sortBy(_.getOrder)
+ private val engineExecutions = initEngineExecutions
+
+ info("The list of EngineConnExecution: " + engineExecutions.toList)
private def initEngineExecutions: Array[EngineConnExecution] = {
Utils.tryThrow {
val reflections = ClassUtils.reflections
val allSubClass = reflections.getSubTypesOf(classOf[EngineConnExecution])
- allSubClass.asScala.filter(! ClassUtils.isInterfaceOrAbstract(_)).map(_.newInstance).toArray
+ allSubClass.asScala.filter(!ClassUtils.isInterfaceOrAbstract(_)).map(_.newInstance).toArray.sortBy(_.getOrder)
}(t => throw new EngineConnBuildFailedException(20000, "Cannot instance EngineConnExecution.", t))
}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/com/webank/wedatasphere/linkis/engineconn/core/executor/ExecutorManager.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/com/webank/wedatasphere/linkis/engineconn/core/executor/ExecutorManager.scala
index ebbd220..47125d7 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/com/webank/wedatasphere/linkis/engineconn/core/executor/ExecutorManager.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-core/src/main/scala/com/webank/wedatasphere/linkis/engineconn/core/executor/ExecutorManager.scala
@@ -29,7 +29,7 @@
import com.webank.wedatasphere.linkis.engineconn.executor.conf.EngineConnExecutorConfiguration
import com.webank.wedatasphere.linkis.engineconn.executor.entity.{Executor, LabelExecutor, SensibleExecutor}
import com.webank.wedatasphere.linkis.manager.engineplugin.common.EngineConnPlugin
-import com.webank.wedatasphere.linkis.manager.engineplugin.common.creation.{CodeLanguageLabelExecutorFactory, ExecutorFactory, LabelExecutorFactory, MultiExecutorEngineConnFactory, SingleExecutorEngineConnFactory, SingleLabelExecutorEngineConnFactory}
+import com.webank.wedatasphere.linkis.manager.engineplugin.common.creation._
import com.webank.wedatasphere.linkis.manager.engineplugin.common.exception.{EngineConnPluginErrorCode, EngineConnPluginErrorException}
import com.webank.wedatasphere.linkis.manager.label.entity.Label
@@ -81,43 +81,63 @@
protected def tryCreateExecutor(engineCreationContext: EngineCreationContext,
labels: Array[Label[_]]): LabelExecutor = {
- val labelExecutor = Option(labels).flatMap(_ => factories.find {
+ val labelStr = if(labels != null) labels.toList else "()"
+ info(s"Try to create a executor with labels $labelStr.")
+ val labelExecutor = if (null == labels || labels.isEmpty) {
+ defaultFactory.createExecutor(engineCreationContext, engineConn).asInstanceOf[LabelExecutor]
+ } else factories.find {
case labelExecutorFactory: LabelExecutorFactory => labelExecutorFactory.canCreate(labels)
case _ => false
}.map {
- case labelExecutorFactory: LabelExecutorFactory => labelExecutorFactory.createExecutor(engineCreationContext, engineConn, labels)
- }).getOrElse(defaultFactory.createExecutor(engineCreationContext, engineConn).asInstanceOf[LabelExecutor])
- info(s"Finished create executor ${labelExecutor.getId}.")
- labelExecutor.init()
- info(s"Finished init executor ${labelExecutor.getId}.")
- labelExecutor
- }
-
- protected def createExecutor(engineCreationContext: EngineCreationContext): LabelExecutor = {
- defaultFactory match {
- case labelExecutorFactory: CodeLanguageLabelExecutorFactory =>
- createExecutor(engineCreationContext, Array[Label[_]](labelExecutorFactory.getDefaultCodeLanguageLabel))
- case _ =>
- val executor = tryCreateExecutor(engineCreationContext, null)
- executors.put(executor.getId, executor)
- executor
+ case labelExecutorFactory: LabelExecutorFactory =>
+ info(s"Use ${labelExecutorFactory.getClass.getSimpleName} to create executor.")
+ labelExecutorFactory.createExecutor(engineCreationContext, engineConn, labels)
+ }.getOrElse{
+ info("No LabelExecutorFactory matched, use DefaultExecutorFactory to create executor.")
+ defaultFactory.createExecutor(engineCreationContext, engineConn).asInstanceOf[LabelExecutor]
}
-
+ info(s"Finished to create ${labelExecutor.getClass.getSimpleName}(${labelExecutor.getId}) with labels $labelStr.")
+ labelExecutor.init()
+ info(s"Finished to init ${labelExecutor.getClass.getSimpleName}(${labelExecutor.getId}).")
+ labelExecutor
}
protected def getLabelKey(labels: Array[Label[_]]): String = labels.map(_.getStringValue).mkString("&")
protected def createExecutor(engineCreationContext: EngineCreationContext,
+ labels: Array[Label[_]] = null): LabelExecutor = {
+ if (null == labels || labels.isEmpty) {
+ defaultFactory match {
+ case labelExecutorFactory: CodeLanguageLabelExecutorFactory =>
+ val createExecutorLabels = Array[Label[_]](labelExecutorFactory.getDefaultCodeLanguageLabel)
+ createLabelExecutor(engineCreationContext, createExecutorLabels)
+ case _ =>
+ val executor = tryCreateExecutor(engineCreationContext, null)
+ executors.put(executor.getId, executor)
+ executor
+ //throw new EngineConnPluginErrorException(EngineConnPluginErrorCode.INVALID_LABELS, "no labels and defaultFactory is not CodeLanguageLabelExecutorFactory")
+ }
+ } else {
+ createLabelExecutor(engineCreationContext, labels)
+ }
+
+ }
+
+ private def createLabelExecutor(engineCreationContext: EngineCreationContext,
labels: Array[Label[_]]): LabelExecutor = {
- if(null == labels || labels.isEmpty) return createExecutor(engineCreationContext)
val labelKey = getLabelKey(labels)
if (null == labelKey) {
val msg = "Cannot get label key. labels : " + GSON.toJson(labels)
throw new EngineConnPluginErrorException(EngineConnPluginErrorCode.INVALID_LABELS, msg)
}
+
+ if (!executors.containsKey(labelKey)) executors synchronized {
+ if (!executors.containsKey(labelKey)) {
val executor = tryCreateExecutor(engineCreationContext, labels)
executors.put(labelKey, executor)
- executor
+ }
+ }
+ executors.get(labelKey)
}
override def generateExecutorId(): Int = idCreator.getAndIncrement()
@@ -125,9 +145,11 @@
override def getExecutorByLabels(labels: Array[Label[_]]): LabelExecutor = {
val labelKey = getLabelKey(labels)
if (null == labelKey) return null
+ if (!executors.containsKey(labelKey)) executors synchronized {
if (!executors.containsKey(labelKey)) {
createExecutor(engineConn.getEngineCreationContext, labels)
}
+ }
executors.get(labelKey)
}
@@ -145,22 +167,34 @@
case (k, _) => executors.remove(k)
}.orNull
- override def getReportExecutor: Executor = if(getExecutors.isEmpty) createExecutor(engineConn.getEngineCreationContext)
- else getExecutors.maxBy {
- case executor: SensibleExecutor => executor.getLastActivityTime
+ override def getReportExecutor: Executor = if (getExecutors.isEmpty) {
+ val labels = defaultFactory match {
+ case labelExecutorFactory: CodeLanguageLabelExecutorFactory =>
+ Array[Label[_]](labelExecutorFactory.getDefaultCodeLanguageLabel)
+ case _ =>
+ if (null == engineConn.getEngineCreationContext.getLabels()) Array.empty[Label[_]]
+ else engineConn.getEngineCreationContext.getLabels().toArray[Label[_]](Array.empty[Label[_]])
+ }
+ createExecutor(engineConn.getEngineCreationContext, labels)
+ } else {
+ getExecutors.maxBy {
+ case executor: SensibleExecutor => executor.getStatus.ordinal()
case executor: Executor => executor.getId.hashCode
}
}
+}
-object ExecutorManager {
+object ExecutorManager extends Logging {
- private var executorManager: ExecutorManager = _
+ private var executorManager: LabelExecutorManager = _
private def init(): Unit = {
- executorManager = Utils.getClassInstance[ExecutorManager](EngineConnExecutorConfiguration.EXECUTOR_MANAGER_CLASS.acquireNew)
+ val executorManagerClass = EngineConnExecutorConfiguration.EXECUTOR_MANAGER_CLASS.acquireNew
+ info(s"Try to use $executorManagerClass to instance a ExecutorManager.")
+ executorManager = Utils.getClassInstance[LabelExecutorManager](executorManagerClass)
}
- def getInstance: ExecutorManager = {
+ def getInstance: LabelExecutorManager = {
if(executorManager == null) synchronized {
if(executorManager == null) init()
}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/pom.xml b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/pom.xml
index bbc9791..8d1b9cf 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/pom.xml
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/com/webank/wedatasphere/linkis/engineconn/acessible/executor/log/MountLogCache.java b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/com/webank/wedatasphere/linkis/engineconn/acessible/executor/log/MountLogCache.java
index 4058910..e1b2f62 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/com/webank/wedatasphere/linkis/engineconn/acessible/executor/log/MountLogCache.java
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/java/com/webank/wedatasphere/linkis/engineconn/acessible/executor/log/MountLogCache.java
@@ -49,7 +49,7 @@
public synchronized void enqueue(String value) {
if (count == max) {
- logger.warn("Queue is full, log: {} needs to be dropped", value);
+ logger.debug("Queue is full, log: {} needs to be dropped", value);
} else {
rear = (rear + 1) % max;
elements[rear] = value;
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/entity/AccessibleExecutor.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/entity/AccessibleExecutor.scala
index c668dd0..cc89b88 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/entity/AccessibleExecutor.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/entity/AccessibleExecutor.scala
@@ -27,7 +27,7 @@
private var isExecutorClosed = false
- def isIdle: Boolean = NodeStatus.isIdle(getStatus)
+ def isIdle: Boolean = NodeStatus.isIdle(getStatus) || NodeStatus.Starting == getStatus
def isBusy: Boolean = NodeStatus.isLocked(getStatus)
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/execution/AccessibleEngineConnExecution.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/execution/AccessibleEngineConnExecution.scala
index 6c1eac2..0055122 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/execution/AccessibleEngineConnExecution.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/execution/AccessibleEngineConnExecution.scala
@@ -1,15 +1,40 @@
+/*
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package com.webank.wedatasphere.linkis.engineconn.acessible.executor.execution
-import com.webank.wedatasphere.linkis.common.utils.Logging
+import java.util.concurrent.TimeUnit
+
+import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
+import com.webank.wedatasphere.linkis.engineconn.acessible.executor.conf.AccessibleExecutorConfiguration
+import com.webank.wedatasphere.linkis.engineconn.acessible.executor.entity.AccessibleExecutor
import com.webank.wedatasphere.linkis.engineconn.common.creation.EngineCreationContext
import com.webank.wedatasphere.linkis.engineconn.common.engineconn.EngineConn
import com.webank.wedatasphere.linkis.engineconn.common.execution.EngineConnExecution
+import com.webank.wedatasphere.linkis.engineconn.core.EngineConnObject
import com.webank.wedatasphere.linkis.engineconn.core.executor.ExecutorManager
+import com.webank.wedatasphere.linkis.engineconn.core.hook.ShutdownHook
import com.webank.wedatasphere.linkis.engineconn.executor.entity.{Executor, LabelExecutor, ResourceExecutor}
import com.webank.wedatasphere.linkis.engineconn.executor.listener.ExecutorListenerBusContext
import com.webank.wedatasphere.linkis.engineconn.executor.service.ManagerService
+import com.webank.wedatasphere.linkis.manager.common.entity.enumeration.NodeStatus
+import com.webank.wedatasphere.linkis.manager.common.protocol.engine.EngineConnReleaseRequest
import com.webank.wedatasphere.linkis.manager.common.protocol.resource.ResourceUsedProtocol
import com.webank.wedatasphere.linkis.rpc.Sender
+import org.apache.commons.lang.exception.ExceptionUtils
class AccessibleEngineConnExecution extends EngineConnExecution with Logging {
@@ -29,9 +54,11 @@
override def execute(engineCreationContext: EngineCreationContext, engineConn: EngineConn): Unit = {
init(engineCreationContext)
val executor = findReportExecutor(engineCreationContext, engineConn)
+ info(s"Created a report executor ${executor.getClass.getSimpleName}(${executor.getId}).")
beforeReportToLinkisManager(executor, engineCreationContext, engineConn)
reportUsedResource(executor, engineCreationContext)
reportLabel(executor)
+ executorStatusChecker
afterReportToLinkisManager(executor, engineCreationContext, engineConn)
}
@@ -40,19 +67,67 @@
listenerBusContext.getEngineConnAsyncListenerBus.start()
}
+
+ private def executorStatusChecker(): Unit = {
+ val context = EngineConnObject.getEngineCreationContext
+ val maxFreeTimeVar = AccessibleExecutorConfiguration.ENGINECONN_MAX_FREE_TIME.getValue(context.getOptions)
+ val maxFreeTimeStr = maxFreeTimeVar.toString
+ val maxFreeTime = maxFreeTimeVar.toLong
+ info("executorStatusChecker created, maxFreeTimeMills is " + maxFreeTime)
+ Utils.defaultScheduler.scheduleAtFixedRate(new Runnable {
+ override def run(): Unit = Utils.tryAndWarn {
+ val accessibleExecutor = ExecutorManager.getInstance.getReportExecutor match {
+ case executor: AccessibleExecutor => executor
+ case executor: Executor =>
+ warn(s"Executor(${executor.getId}) is not a AccessibleExecutor, do noting when reached max free time .")
+ return
+ }
+ if (NodeStatus.isCompleted(accessibleExecutor.getStatus)) {
+ error(s"${accessibleExecutor.getId} has completed with status ${accessibleExecutor.getStatus}, now stop it.")
+ ShutdownHook.getShutdownHook.notifyStop()
+ } else if (accessibleExecutor.getStatus == NodeStatus.ShuttingDown) {
+ warn(s"${accessibleExecutor.getId} is ShuttingDown...")
+ ShutdownHook.getShutdownHook.notifyStop()
+ } else if (maxFreeTime > 0 && (NodeStatus.Unlock.equals(accessibleExecutor.getStatus) || NodeStatus.Idle.equals(accessibleExecutor.getStatus) )
+ && System.currentTimeMillis - accessibleExecutor.getLastActivityTime > maxFreeTime) {
+ warn(s"${accessibleExecutor.getId} has not been used for $maxFreeTimeStr, now try to shutdown it.")
+ ShutdownHook.getShutdownHook.notifyStop()
+ requestManagerReleaseExecutor(" idle release")
+ Utils.defaultScheduler.scheduleWithFixedDelay(new Runnable {
+ override def run(): Unit = {
+ Utils.tryCatch {
+ warn(s"Now exit with code ${ShutdownHook.getShutdownHook.getExitCode()}")
+ System.exit(ShutdownHook.getShutdownHook.getExitCode())
+ } { t =>
+ error(s"Exit error : ${ExceptionUtils.getRootCauseMessage(t)}.", t)
+ System.exit(-1)
+ }
+ }
+ }, 3000,1000*10, TimeUnit.MILLISECONDS)
+ }
+ }
+ }, 3 * 60 * 1000, AccessibleExecutorConfiguration.ENGINECONN_HEARTBEAT_TIME.getValue.toLong, TimeUnit.MILLISECONDS)
+ }
+
+ def requestManagerReleaseExecutor(msg: String): Unit = {
+ val engineReleaseRequest = new EngineConnReleaseRequest(Sender.getThisServiceInstance, Utils.getJvmUser, msg, EngineConnObject.getEngineCreationContext.getTicketId)
+ ManagerService.getManagerService.requestReleaseEngineConn(engineReleaseRequest)
+ }
+
protected def reportUsedResource(executor: Executor, engineCreationContext: EngineCreationContext): Unit = executor match {
case resourceExecutor: ResourceExecutor =>
ManagerService.getManagerService
.reportUsedResource(ResourceUsedProtocol(Sender.getThisServiceInstance,
resourceExecutor.getCurrentNodeResource(), engineCreationContext.getTicketId))
+ info("In the first time, report usedResources to LinkisManager succeed.")
case _ =>
- info("Do not need to report usedResource.")
+ info("Do not need to report usedResources.")
}
protected def reportLabel(executor: Executor): Unit = executor match {
case labelExecutor: LabelExecutor =>
ManagerService.getManagerService.labelReport(labelExecutor.getExecutorLabels())
- info("Reported all labels to LinkisManager.")
+ info("In the first time, report all labels to LinkisManager succeed.")
case _ =>
info("Do not need to report labels.")
}
@@ -62,5 +137,5 @@
*
* @return
*/
- override def getOrder: Int = 0
+ override def getOrder: Int = 10
}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/lock/EngineConnTimedLock.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/lock/EngineConnTimedLock.scala
index 75cee78..d186b78 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/lock/EngineConnTimedLock.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/lock/EngineConnTimedLock.scala
@@ -22,7 +22,9 @@
import com.webank.wedatasphere.linkis.engineconn.acessible.executor.entity.AccessibleExecutor
import com.webank.wedatasphere.linkis.engineconn.acessible.executor.listener.ExecutorStatusListener
import com.webank.wedatasphere.linkis.engineconn.acessible.executor.listener.event.{ExecutorCompletedEvent, ExecutorCreateEvent, ExecutorStatusChangedEvent, ExecutorUnLockEvent}
+import com.webank.wedatasphere.linkis.engineconn.core.executor.ExecutorManager
import com.webank.wedatasphere.linkis.engineconn.executor.listener.ExecutorListenerBusContext
+import com.webank.wedatasphere.linkis.engineconn.executor.service.ManagerService
import com.webank.wedatasphere.linkis.manager.common.entity.enumeration.NodeStatus
class EngineConnTimedLock(private var timeout: Long) extends TimedLock with Logging with ExecutorStatusListener {
@@ -41,7 +43,7 @@
}
override def tryAcquire(executor: AccessibleExecutor): Boolean = {
- if (null == executor) return false
+ if (null == executor || NodeStatus.Unlock != executor.getStatus) return false
val succeed = lock.tryAcquire()
debug("try to lock for succeed is " + succeed.toString)
if (succeed) {
@@ -57,6 +59,7 @@
override def release(): Unit = {
debug("try to release for lock," + lockedBy + ",current thread " + Thread.currentThread().getName)
if (lockedBy != null) {
+ //&& lockedBy == Thread.currentThread() Inconsistent thread(线程不一致)
debug("try to release for lockedBy and thread ")
if (releaseTask != null) {
releaseTask.cancel(true)
@@ -65,6 +68,7 @@
debug("try to release for lock release success")
lockedBy = null
}
+ unlockCallback(lock.toString)
resetLock()
}
@@ -88,15 +92,17 @@
private def scheduleTimeout: Unit = {
synchronized {
- if (null == releaseTask || releaseTask.isDone) {
+ if (null == releaseTask ) {
releaseTask = releaseScheduler.scheduleWithFixedDelay(new Runnable {
override def run(): Unit = {
synchronized {
- if (isAcquired() && isExpired()) {
+ if (isAcquired() && NodeStatus.Idle == lockedBy.getStatus && isExpired()) {
// unlockCallback depends on lockedBy, so lockedBy cannot be set null before unlockCallback
- unlockCallback(lock.toString)
info(s"Lock : [${lock.toString} was released due to timeout." )
- resetLock()
+ release()
+ } else if (isAcquired() && NodeStatus.Busy == lockedBy.getStatus) {
+ lastLockTime = System.currentTimeMillis()
+ info("Update lastLockTime because executor is busy.")
}
}
}
@@ -148,8 +154,16 @@
}
private def unlockCallback(lockStr: String): Unit = {
- if (null != lockedBy) {
+ /*if (null != lockedBy) {
lockedBy.transition(NodeStatus.Unlock)
+ }*/
+ val executors = ExecutorManager.getInstance.getExecutors.filter(executor => null != executor && !executor.isClosed)
+ if (null != executors && !executors.isEmpty) {
+ executors.foreach(executor => executor match {
+ case accessibleExecutor: AccessibleExecutor =>
+ accessibleExecutor.transition(NodeStatus.Unlock)
+ case _ =>
+ })
}
ExecutorListenerBusContext.getExecutorListenerBusContext().getEngineConnAsyncListenerBus.post(ExecutorUnLockEvent(null, lockStr.toString))
}
@@ -160,7 +174,7 @@
override def onExecutorStatusChanged(executorStatusChangedEvent: ExecutorStatusChangedEvent): Unit = {
val toStatus = executorStatusChangedEvent.toStatus
- if (NodeStatus.Busy == toStatus || NodeStatus.Idle == toStatus) {
+ if (isAcquired() && NodeStatus.Idle == toStatus) {
info(s"Status changed to ${toStatus.name()}, update lastUpdatedTime for lock.")
lastLockTime = System.currentTimeMillis()
scheduleTimeout
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/service/DefaultAccessibleService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/service/DefaultAccessibleService.scala
index c4c94e0..d8c881d 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/service/DefaultAccessibleService.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/service/DefaultAccessibleService.scala
@@ -16,14 +16,11 @@
package com.webank.wedatasphere.linkis.engineconn.acessible.executor.service
-import java.util.concurrent.TimeUnit
import com.webank.wedatasphere.linkis.DataWorkCloudApplication
import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
-import com.webank.wedatasphere.linkis.engineconn.acessible.executor.conf.AccessibleExecutorConfiguration
import com.webank.wedatasphere.linkis.engineconn.acessible.executor.entity.AccessibleExecutor
import com.webank.wedatasphere.linkis.engineconn.acessible.executor.listener.event.{ExecutorCompletedEvent, ExecutorCreateEvent, ExecutorStatusChangedEvent}
import com.webank.wedatasphere.linkis.engineconn.core.EngineConnObject
-import com.webank.wedatasphere.linkis.engineconn.core.engineconn.EngineConnManager
import com.webank.wedatasphere.linkis.engineconn.core.executor.ExecutorManager
import com.webank.wedatasphere.linkis.engineconn.core.hook.ShutdownHook
import com.webank.wedatasphere.linkis.engineconn.executor.entity.{Executor, SensibleExecutor}
@@ -37,7 +34,6 @@
import com.webank.wedatasphere.linkis.rpc.Sender
import javax.annotation.PostConstruct
-import org.apache.commons.lang.exception.ExceptionUtils
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.context.event.{ContextClosedEvent, EventListener}
import org.springframework.stereotype.Service
@@ -70,11 +66,14 @@
@EventListener
def executorShutDownHook(event: ContextClosedEvent): Unit = {
info("executorShutDownHook start to execute.")
+ if (! EngineConnObject.isReady) {
+ warn("EngineConn not ready, do not shutdown")
+ return
+ }
var executor: Executor = ExecutorManager.getInstance.getReportExecutor
if (null != executor){
Utils.tryAndWarn{
executor.tryShutdown()
- Thread.sleep(2000)
}
warn(s"Engine : ${Sender.getThisInstance} with state has stopped successfully.")
@@ -87,6 +86,7 @@
}
executorHeartbeatService.reportHeartBeatMsg(executor)
info("Reported status shuttingDown to manager.")
+ Thread.sleep(2000)
}
override def stopExecutor: Unit = {
@@ -101,55 +101,10 @@
true
}
- /**
- * Service启动后则启动定时任务 空闲释放
- */
+
@PostConstruct
def init(): Unit = {
- val context = EngineConnObject.getEngineCreationContext
- val maxFreeTimeVar = AccessibleExecutorConfiguration.ENGINECONN_MAX_FREE_TIME.getValue(context.getOptions)
- val maxFreeTimeStr = maxFreeTimeVar.toString
- val maxFreeTime = maxFreeTimeVar.toLong
- info("maxFreeTimeMills is " + maxFreeTime)
- Utils.defaultScheduler.scheduleAtFixedRate(new Runnable {
- override def run(): Unit = Utils.tryAndWarn {
- val accessibleExecutor = ExecutorManager.getInstance.getReportExecutor match {
- case executor: AccessibleExecutor => executor
- case executor: Executor =>
- warn(s"Executor(${executor.getId}) is not a AccessibleExecutor, do noting when reached max free time .")
- return
- }
- if (NodeStatus.isCompleted(accessibleExecutor.getStatus)) {
- error(s"${accessibleExecutor.getId} has completed with status ${accessibleExecutor.getStatus}, now stop it.")
- ShutdownHook.getShutdownHook.notifyStop()
- } else if (accessibleExecutor.getStatus == NodeStatus.ShuttingDown) {
- warn(s"${accessibleExecutor.getId} is ShuttingDown...")
- ShutdownHook.getShutdownHook.notifyStop()
- } else if (maxFreeTime > 0 && NodeStatus.Unlock.equals(accessibleExecutor.getStatus) && System.currentTimeMillis - accessibleExecutor.getLastActivityTime > maxFreeTime) {
- warn(s"${accessibleExecutor.getId} has not been used for $maxFreeTimeStr, now try to shutdown it.")
- ShutdownHook.getShutdownHook.notifyStop()
- requestManagerReleaseExecutor(" idle release")
- Utils.defaultScheduler.scheduleWithFixedDelay(new Runnable {
- override def run(): Unit = {
- Utils.tryCatch {
- warn(s"Now exit with code ${ShutdownHook.getShutdownHook.getExitCode()}")
- System.exit(ShutdownHook.getShutdownHook.getExitCode())
- } { t =>
- error(s"Exit error : ${ExceptionUtils.getRootCauseMessage(t)}.", t)
- System.exit(-1)
- }
- }
- }, 3000, 1000*10, TimeUnit.MILLISECONDS)
- }
- }
- }, 3 * 60 * 1000, AccessibleExecutorConfiguration.ENGINECONN_HEARTBEAT_TIME.getValue.toLong, TimeUnit.MILLISECONDS)
asyncListenerBusContext.addListener(this)
- /*Utils.defaultScheduler.submit(new Runnable {
- override def run(): Unit = {
- Utils.addShutdownHook(executorShutDownHook())
- info("Succeed to register shutdownHook.")
- }
- })*/
}
private def stopEngine(): Unit = {
@@ -163,17 +118,21 @@
*/
override def requestManagerReleaseExecutor(msg: String): Unit = {
- val engineReleaseRequest = new EngineConnReleaseRequest(Sender.getThisServiceInstance, Utils.getJvmUser, msg, EngineConnManager.getEngineConnManager.getEngineConn.getEngineCreationContext.getTicketId)
+ val engineReleaseRequest = new EngineConnReleaseRequest(Sender.getThisServiceInstance, Utils.getJvmUser, msg, EngineConnObject.getEngineCreationContext.getTicketId)
ManagerService.getManagerService.requestReleaseEngineConn(engineReleaseRequest)
}
@Receiver
override def dealRequestNodeStatus(requestNodeStatus: RequestNodeStatus): ResponseNodeStatus = {
- val status = ExecutorManager.getInstance.getReportExecutor match {
+ val status = if (EngineConnObject.isReady) {
+ ExecutorManager.getInstance.getReportExecutor match {
case executor: SensibleExecutor =>
executor.getStatus
case _ => NodeStatus.Starting
}
+ } else {
+ NodeStatus.Starting
+ }
val responseNodeStatus = new ResponseNodeStatus
responseNodeStatus.setNodeStatus(status)
responseNodeStatus
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/service/DefaultExecutorHeartbeatService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/service/DefaultExecutorHeartbeatService.scala
index 57b1ea2..6a213e6 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/service/DefaultExecutorHeartbeatService.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/service/DefaultExecutorHeartbeatService.scala
@@ -28,6 +28,7 @@
import com.webank.wedatasphere.linkis.engineconn.executor.entity.{Executor, ResourceExecutor, SensibleExecutor}
import com.webank.wedatasphere.linkis.engineconn.executor.listener.ExecutorListenerBusContext
import com.webank.wedatasphere.linkis.engineconn.executor.service.ManagerService
+import com.webank.wedatasphere.linkis.manager.common.entity.enumeration.NodeStatus
import com.webank.wedatasphere.linkis.manager.common.protocol.node.{NodeHeartbeatMsg, NodeHeartbeatRequest}
import com.webank.wedatasphere.linkis.message.annotation.Receiver
import com.webank.wedatasphere.linkis.rpc.Sender
@@ -69,36 +70,51 @@
*
* @param executor
*/
- override def reportHeartBeatMsg(executor: Executor): Unit = {
+ override def reportHeartBeatMsg(executor: Executor = null): Unit = {
ManagerService.getManagerService.heartbeatReport(generateHeartBeatMsg(executor))
}
@Receiver
- override def dealNodeHeartbeatRequest(nodeHeartbeatRequest: NodeHeartbeatRequest): NodeHeartbeatMsg = {
- val executor = ExecutorManager.getInstance.getReportExecutor
- generateHeartBeatMsg(executor)
- }
+ override def dealNodeHeartbeatRequest(nodeHeartbeatRequest: NodeHeartbeatRequest): NodeHeartbeatMsg = generateHeartBeatMsg(null)
+
+
override def onNodeHealthyUpdate(nodeHealthyUpdateEvent: NodeHealthyUpdateEvent): Unit = {
warn(s"node healthy update, tiger heartbeatReport")
- val executor = ExecutorManager.getInstance.getReportExecutor
- reportHeartBeatMsg(executor)
+ //val executor = ExecutorManager.getInstance.getReportExecutor
+ reportHeartBeatMsg()
}
- override def generateHeartBeatMsg(executor: Executor): NodeHeartbeatMsg = {
+ /**
+ * Generate heartbeat information through report by default
+ * If engine conn is not initialized, the default information is generated
+ * @param executor
+ * @return
+ */
+ override def generateHeartBeatMsg(executor: Executor = null): NodeHeartbeatMsg = {
+ val realExecutor = if (null == executor) {
+ if (EngineConnObject.isReady) ExecutorManager.getInstance.getReportExecutor else null
+ } else {
+ executor
+ }
+
val nodeHeartbeatMsg = new NodeHeartbeatMsg
nodeHeartbeatMsg.setServiceInstance(Sender.getThisServiceInstance)
+ if (null == realExecutor) {
+ nodeHeartbeatMsg.setStatus(NodeStatus.Starting)
+ return nodeHeartbeatMsg
+ }
nodeHeartbeatMsg.setOverLoadInfo(nodeOverLoadInfoManager.getNodeOverLoadInfo)
nodeHeartbeatMsg.setHealthyInfo(nodeHealthyInfoManager.getNodeHealthyInfo())
- executor match {
+ realExecutor match {
case sensibleExecutor: SensibleExecutor =>
nodeHeartbeatMsg.setStatus(sensibleExecutor.getStatus)
case _ =>
}
- executor match {
+ realExecutor match {
case resourceExecutor: ResourceExecutor =>
nodeHeartbeatMsg.setNodeResource(resourceExecutor.getCurrentNodeResource())
case _ =>
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/service/DefaultManagerService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/service/DefaultManagerService.scala
index 7500d9f..d3ec6ad 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/service/DefaultManagerService.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/service/DefaultManagerService.scala
@@ -18,6 +18,7 @@
import java.util
+import com.webank.wedatasphere.linkis.common.utils.Logging
import com.webank.wedatasphere.linkis.engineconn.executor.service.ManagerService
import com.webank.wedatasphere.linkis.governance.common.conf.GovernanceCommonConf
import com.webank.wedatasphere.linkis.manager.common.entity.enumeration.NodeStatus
@@ -26,9 +27,12 @@
import com.webank.wedatasphere.linkis.manager.common.protocol.node.{NodeHeartbeatMsg, ResponseNodeStatus}
import com.webank.wedatasphere.linkis.manager.common.protocol.resource.ResourceUsedProtocol
import com.webank.wedatasphere.linkis.manager.label.entity.Label
+import com.webank.wedatasphere.linkis.manager.label.entity.engine.EngineTypeLabel
import com.webank.wedatasphere.linkis.rpc.Sender
-class DefaultManagerService extends ManagerService {
+import scala.collection.JavaConverters._
+
+class DefaultManagerService extends ManagerService with Logging{
protected def getManagerSender: Sender = {
@@ -41,7 +45,16 @@
override def labelReport(labels: util.List[Label[_]]): Unit = {
- val labelReportRequest = LabelReportRequest(labels, Sender.getThisServiceInstance)
+ if (null == labels || labels.isEmpty) {
+ info("labels is empty, Not reported")
+ return
+ }
+ val reportLabel = labels.asScala.filter(_.isInstanceOf[EngineTypeLabel])
+ if (reportLabel.isEmpty) {
+ info("engineType labels is empty, Not reported")
+ return
+ }
+ val labelReportRequest = LabelReportRequest(reportLabel.asJava, Sender.getThisServiceInstance)
getManagerSender.send(labelReportRequest)
}
@@ -58,6 +71,7 @@
override def heartbeatReport(nodeHeartbeatMsg: NodeHeartbeatMsg): Unit = {
getManagerSender.send(nodeHeartbeatMsg)
+ info(s"success to send engine heartbeat report to ${Sender.getInstances(GovernanceCommonConf.MANAGER_SPRING_NAME.getValue).map(_.getInstance).mkString(",")},status:${nodeHeartbeatMsg.getStatus},msg:${nodeHeartbeatMsg.getHeartBeatMsg}")
}
override def reportUsedResource(resourceUsedProtocol: ResourceUsedProtocol): Unit = {
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/service/EngineConnTimedLockService.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/service/EngineConnTimedLockService.scala
index 9960e4b..7d94210 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/service/EngineConnTimedLockService.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/accessible-executor/src/main/scala/com/webank/wedatasphere/linkis/engineconn/acessible/executor/service/EngineConnTimedLockService.scala
@@ -99,8 +99,18 @@
if (engineConnLock.tryAcquire(accessibleExecutor)) {
debug("try to lock for tryAcquire is true ")
this.lockString = engineConnLock.lock.toString
- ExecutorListenerBusContext.getExecutorListenerBusContext().getEngineConnAsyncListenerBus.post(ExecutorLockEvent(accessibleExecutor, lockString))
+ val executors = ExecutorManager.getInstance.getExecutors.filter(executor => null != executor && !executor.isClosed)
+ if (null != executors && !executors.isEmpty) {
+ executors.foreach(executor => executor match {
+ case accessibleExecutor: AccessibleExecutor =>
+ accessibleExecutor.transition(NodeStatus.Idle)
+ case _ =>
+ })
+ } else {
+ error("No valid executors while adding lock.")
accessibleExecutor.transition(NodeStatus.Idle)
+ }
+ ExecutorListenerBusContext.getExecutorListenerBusContext().getEngineConnAsyncListenerBus.post(ExecutorLockEvent(accessibleExecutor, lockString))
Some(lockString)
} else None
case _ =>
@@ -121,14 +131,6 @@
info(s"try to unlock lockEntity : lockString=$lockString,lockedBy=${engineConnLock.lockedBy.getId}")
engineConnLock.release()
this.lockString = null
- ExecutorListenerBusContext.getExecutorListenerBusContext().getEngineConnAsyncListenerBus.post(ExecutorUnLockEvent(null, lock))
- ExecutorManager.getInstance.getReportExecutor match {
- case accessibleExecutor: AccessibleExecutor =>
- accessibleExecutor.transition(NodeStatus.Unlock)
- case _ =>
- val msg = s"Invalid executor or not instance of SensibleEngine."
- error(msg)
- }
true
} else {
false
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/callback-service/pom.xml b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/callback-service/pom.xml
index 2e8e24d..5cd6798 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/callback-service/pom.xml
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/callback-service/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/callback-service/src/main/scala/com/webank/wedatasphere/linkis/engineconn/callback/hook/CallbackEngineConnHook.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/callback-service/src/main/scala/com/webank/wedatasphere/linkis/engineconn/callback/hook/CallbackEngineConnHook.scala
index a989c12..2716029 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/callback-service/src/main/scala/com/webank/wedatasphere/linkis/engineconn/callback/hook/CallbackEngineConnHook.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/callback-service/src/main/scala/com/webank/wedatasphere/linkis/engineconn/callback/hook/CallbackEngineConnHook.scala
@@ -22,6 +22,7 @@
import com.webank.wedatasphere.linkis.common.conf.DWCArgumentsParser
import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
import com.webank.wedatasphere.linkis.engineconn.callback.service.{EngineConnAfterStartCallback, EngineConnPidCallback}
+import com.webank.wedatasphere.linkis.engineconn.common.conf.EngineConnConf
import com.webank.wedatasphere.linkis.engineconn.common.creation.EngineCreationContext
import com.webank.wedatasphere.linkis.engineconn.common.engineconn.EngineConn
import com.webank.wedatasphere.linkis.engineconn.common.hook.EngineConnHook
@@ -58,8 +59,9 @@
override def afterEngineServerStartFailed(engineCreationContext: EngineCreationContext, throwable: Throwable): Unit = {
val engineConnAfterStartCallback = new EngineConnAfterStartCallback(engineCreationContext.getEMInstance)
+ val prefixMsg = Sender.getThisServiceInstance + s": log dir: ${EngineConnConf.getLogDir},"
engineConnAfterStartCallback.callback(EngineConnStatusCallback(Sender.getThisServiceInstance,
- engineCreationContext.getTicketId, NodeStatus.Failed, ExceptionUtils.getRootCauseMessage(throwable)))
+ engineCreationContext.getTicketId, NodeStatus.Failed, prefixMsg + ExceptionUtils.getRootCauseMessage(throwable)))
error("EngineConnSever start failed! now exit.", throwable)
ShutdownHook.getShutdownHook.notifyError(throwable)
}
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/pom.xml b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/pom.xml
index cbbe4dd..c254c29 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/pom.xml
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/executor-core/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/resource-executor/pom.xml b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/resource-executor/pom.xml
index 520a28d..22a72c5 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/resource-executor/pom.xml
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-executor/resource-executor/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-launch/pom.xml b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-launch/pom.xml
index 865a759..bf3c77e 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-launch/pom.xml
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-launch/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-launch/src/main/scala/com/webank/wedatasphere/linkis/engineconn/launch/EngineConnServer.scala b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-launch/src/main/scala/com/webank/wedatasphere/linkis/engineconn/launch/EngineConnServer.scala
index 84446c7..32f1f18 100644
--- a/linkis-computation-governance/linkis-engineconn/linkis-engineconn-launch/src/main/scala/com/webank/wedatasphere/linkis/engineconn/launch/EngineConnServer.scala
+++ b/linkis-computation-governance/linkis-engineconn/linkis-engineconn-launch/src/main/scala/com/webank/wedatasphere/linkis/engineconn/launch/EngineConnServer.scala
@@ -1,11 +1,10 @@
package com.webank.wedatasphere.linkis.engineconn.launch
import com.webank.wedatasphere.linkis.common.ServiceInstance
-import com.webank.wedatasphere.linkis.common.conf.CommonVars
+import com.webank.wedatasphere.linkis.common.conf.{CommonVars, Configuration}
import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
import com.webank.wedatasphere.linkis.engineconn.common.creation.{DefaultEngineCreationContext, EngineCreationContext}
import com.webank.wedatasphere.linkis.engineconn.common.engineconn.EngineConn
-import com.webank.wedatasphere.linkis.engineconn.common.execution.EngineConnExecution
import com.webank.wedatasphere.linkis.engineconn.common.hook.EngineConnHook
import com.webank.wedatasphere.linkis.engineconn.core.EngineConnObject
import com.webank.wedatasphere.linkis.engineconn.core.engineconn.EngineConnManager
@@ -16,7 +15,7 @@
import com.webank.wedatasphere.linkis.governance.common.exception.engineconn.{EngineConnExecutorErrorCode, EngineConnExecutorErrorException}
import com.webank.wedatasphere.linkis.governance.common.utils.EngineConnArgumentsParser
import com.webank.wedatasphere.linkis.manager.engineplugin.common.launch.process.Environment
-import com.webank.wedatasphere.linkis.manager.label.builder.factory.{LabelBuilderFactory, LabelBuilderFactoryContext, StdLabelBuilderFactory}
+import com.webank.wedatasphere.linkis.manager.label.builder.factory.{LabelBuilderFactory, LabelBuilderFactoryContext}
import com.webank.wedatasphere.linkis.manager.label.entity.Label
import org.apache.commons.lang.exception.ExceptionUtils
@@ -34,9 +33,14 @@
def main(args: Array[String]): Unit = {
info("<<---------------------EngineConnServer Start --------------------->>")
- Utils.tryCatch {
+ try {
// 1. 封装EngineCreationContext
init(args)
+ val isTestMode = Configuration.IS_TEST_MODE.getValue(engineCreationContext.getOptions)
+ if(isTestMode) {
+ info(s"Step into test mode, pause 30s if debug is required. If you want to disable test mode, please set ${Configuration.IS_TEST_MODE.key} = false.")
+ Utils.sleepQuietly(30000)
+ }
info("Finished to create EngineCreationContext, EngineCreationContext content: " + EngineConnUtils.GSON.toJson(engineCreationContext))
EngineConnHook.getEngineConnHooks.foreach(_.beforeCreateEngineConn(getEngineCreationContext))
info("Finished to execute hook of beforeCreateEngineConn.")
@@ -55,7 +59,8 @@
EngineConnHook.getEngineConnHooks.foreach(_.afterExecutionExecute(getEngineCreationContext, engineConn))
info("Finished to execute hook of afterExecutionExecute")
EngineConnHook.getEngineConnHooks.foreach(_.afterEngineServerStartSuccess(getEngineCreationContext, engineConn))
- } { t =>
+ } catch {
+ case t: Throwable =>
EngineConnHook.getEngineConnHooks.foreach(_.afterEngineServerStartFailed(getEngineCreationContext, t))
error("EngineConnServer Start Failed", t)
System.exit(1)
@@ -92,7 +97,7 @@
this.engineCreationContext.setOptions(jMap)
this.engineCreationContext.setArgs(args)
EngineConnObject.setEngineCreationContext(this.engineCreationContext)
- info("Finished to init engineCreationContext" + EngineConnUtils.GSON.toJson(engineCreationContext))
+ info("Finished to init engineCreationContext: " + EngineConnUtils.GSON.toJson(engineCreationContext))
}
private def executeEngineConn(engineConn: EngineConn): Unit = {
diff --git a/linkis-computation-governance/linkis-engineconn/pom.xml b/linkis-computation-governance/linkis-engineconn/pom.xml
index 0115cb2..dacab20 100644
--- a/linkis-computation-governance/linkis-engineconn/pom.xml
+++ b/linkis-computation-governance/linkis-engineconn/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/linkis-entrance-client/pom.xml b/linkis-computation-governance/linkis-entrance-client/pom.xml
index 60a6b73..5cd06f9 100644
--- a/linkis-computation-governance/linkis-entrance-client/pom.xml
+++ b/linkis-computation-governance/linkis-entrance-client/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<artifactId>linkis-entrance-client</artifactId>
diff --git a/linkis-computation-governance/linkis-entrance/pom.xml b/linkis-computation-governance/linkis-entrance/pom.xml
index 0ffb05c..e0aef15 100644
--- a/linkis-computation-governance/linkis-entrance/pom.xml
+++ b/linkis-computation-governance/linkis-entrance/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<artifactId>linkis-entrance</artifactId>
@@ -118,6 +118,18 @@
<version>${linkis.version}</version>
</dependency>
+ <dependency>
+ <groupId>com.webank.wedatasphere.linkis</groupId>
+ <artifactId>linkis-bml-client</artifactId>
+ <version>${linkis.version}</version>
+ </dependency>
+
+ <dependency>
+ <groupId>com.webank.wedatasphere.linkis</groupId>
+ <artifactId>linkis-instance-label-client</artifactId>
+ <version>${linkis.version}</version>
+ </dependency>
+
</dependencies>
<build>
diff --git a/linkis-computation-governance/linkis-entrance/src/main/assembly/distribution.xml b/linkis-computation-governance/linkis-entrance/src/main/assembly/distribution.xml
index 6781dc2..1cdbc45 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/assembly/distribution.xml
+++ b/linkis-computation-governance/linkis-entrance/src/main/assembly/distribution.xml
@@ -288,31 +288,6 @@
</dependencySets>
<fileSets>
- <fileSet>
- <directory>${basedir}/src/main/resources</directory>
- <includes>
- <include>*</include>
- </includes>
- <fileMode>0777</fileMode>
- <outputDirectory>conf</outputDirectory>
- <lineEnding>unix</lineEnding>
- </fileSet>
- <!-- <fileSet>
- <directory>${basedir}/bin</directory>
- <includes>
- <include>*</include>
- </includes>
- <fileMode>0777</fileMode>
- <outputDirectory>bin</outputDirectory>
- <lineEnding>unix</lineEnding>
- </fileSet>
- <fileSet>
- <directory>.</directory>
- <excludes>
- <exclude>*/**</exclude>
- </excludes>
- <outputDirectory>logs</outputDirectory>
- </fileSet> -->
</fileSets>
</assembly>
diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/com/webank/wedatasphere/linkis/entrance/conf/EntranceSpringConfiguration.java b/linkis-computation-governance/linkis-entrance/src/main/java/com/webank/wedatasphere/linkis/entrance/conf/EntranceSpringConfiguration.java
index bef491c..e51b673 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/java/com/webank/wedatasphere/linkis/entrance/conf/EntranceSpringConfiguration.java
+++ b/linkis-computation-governance/linkis-entrance/src/main/java/com/webank/wedatasphere/linkis/entrance/conf/EntranceSpringConfiguration.java
@@ -14,11 +14,11 @@
package com.webank.wedatasphere.linkis.entrance.conf;
import com.webank.wedatasphere.linkis.entrance.EntranceParser;
-import com.webank.wedatasphere.linkis.entrance.EntranceServer;
import com.webank.wedatasphere.linkis.entrance.annotation.*;
import com.webank.wedatasphere.linkis.entrance.event.*;
import com.webank.wedatasphere.linkis.entrance.execute.impl.EntranceExecutorManagerImpl;
import com.webank.wedatasphere.linkis.entrance.interceptor.EntranceInterceptor;
+import com.webank.wedatasphere.linkis.entrance.interceptor.OnceJobInterceptor;
import com.webank.wedatasphere.linkis.entrance.interceptor.impl.*;
import com.webank.wedatasphere.linkis.entrance.log.*;
import com.webank.wedatasphere.linkis.entrance.parser.CommonEntranceParser;
@@ -109,6 +109,7 @@
@ConditionalOnMissingBean(name = {EntranceInterceptorBeanAnnotation.BEAN_NAME})
public EntranceInterceptor[] generateEntranceInterceptors() {
return new EntranceInterceptor[]{
+ new OnceJobInterceptor(),
new CSEntranceInterceptor(),
new ShellDangerousGrammerInterceptor(),
new PythonCodeCheckInterceptor(),
diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/com/webank/wedatasphere/linkis/entrance/context/DefaultEntranceContext.java b/linkis-computation-governance/linkis-entrance/src/main/java/com/webank/wedatasphere/linkis/entrance/context/DefaultEntranceContext.java
index c27fe0f..7da57ce 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/java/com/webank/wedatasphere/linkis/entrance/context/DefaultEntranceContext.java
+++ b/linkis-computation-governance/linkis-entrance/src/main/java/com/webank/wedatasphere/linkis/entrance/context/DefaultEntranceContext.java
@@ -120,10 +120,4 @@
return this.logListenerBus;
}
-
- @Override
- public EntranceExecutorManager getOrCreateExecutorManager() {
- return (EntranceExecutorManager) getOrCreateScheduler().getSchedulerContext().getOrCreateExecutorManager();
- }
-
}
diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/com/webank/wedatasphere/linkis/entrance/exception/EntranceErrorCode.java b/linkis-computation-governance/linkis-entrance/src/main/java/com/webank/wedatasphere/linkis/entrance/exception/EntranceErrorCode.java
index ab67318..2d8101d 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/java/com/webank/wedatasphere/linkis/entrance/exception/EntranceErrorCode.java
+++ b/linkis-computation-governance/linkis-entrance/src/main/java/com/webank/wedatasphere/linkis/entrance/exception/EntranceErrorCode.java
@@ -28,7 +28,8 @@
EXECUTE_REQUEST_INVALID(20010, "EntranceExecuteRequest invalid "),
SUBMIT_JOB_ERROR(20011, "Submit job error "),
INIT_JOB_ERROR(20012, "Init job error "),
- RESULT_NOT_PERSISTED_ERROR(20013, "Result not persisted error ")
+ RESULT_NOT_PERSISTED_ERROR(20013, "Result not persisted error "),
+ GROUP_NOT_FOUND(20014, "group not found")
;
private int errCode;
diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/com/webank/wedatasphere/linkis/entrance/persistence/QueryPersistenceManager.java b/linkis-computation-governance/linkis-entrance/src/main/java/com/webank/wedatasphere/linkis/entrance/persistence/QueryPersistenceManager.java
index b4b7384..a7ee4ac 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/java/com/webank/wedatasphere/linkis/entrance/persistence/QueryPersistenceManager.java
+++ b/linkis-computation-governance/linkis-entrance/src/main/java/com/webank/wedatasphere/linkis/entrance/persistence/QueryPersistenceManager.java
@@ -18,11 +18,13 @@
import com.webank.wedatasphere.linkis.common.io.FsPath;
import com.webank.wedatasphere.linkis.entrance.EntranceContext;
import com.webank.wedatasphere.linkis.entrance.cs.CSEntranceHelper;
+import com.webank.wedatasphere.linkis.entrance.execute.EntranceExecutorManager;
import com.webank.wedatasphere.linkis.entrance.execute.EntranceJob;
import com.webank.wedatasphere.linkis.entrance.job.EntranceExecuteRequest;
import com.webank.wedatasphere.linkis.governance.common.entity.job.JobRequest;
import com.webank.wedatasphere.linkis.governance.common.entity.job.SubJobInfo;
import com.webank.wedatasphere.linkis.protocol.engine.JobProgressInfo;
+import com.webank.wedatasphere.linkis.scheduler.executer.ExecutorManager;
import com.webank.wedatasphere.linkis.scheduler.executer.OutputExecuteResponse;
import com.webank.wedatasphere.linkis.scheduler.queue.Job;
import com.webank.wedatasphere.linkis.server.BDPJettyServerHelper;
@@ -77,7 +79,11 @@
try {
path = createResultSetEngine().persistResultSet(request, response);
} catch (Throwable e) {
- EntranceJob job = (EntranceJob) getEntranceContext().getOrCreateExecutorManager().getEntranceJobByExecId(request.getJob().getId()).getOrElse(null);
+ EntranceJob job = null;
+ ExecutorManager executorManager = getEntranceContext().getOrCreateScheduler().getSchedulerContext().getOrCreateExecutorManager();
+ if (EntranceExecutorManager.class.isInstance(executorManager)) {
+ job = (EntranceJob) ((EntranceExecutorManager) executorManager).getEntranceJobByExecId(request.getJob().getId()).getOrElse(null);
+ }
String msg = "Persist resultSet failed for subJob : " + request.getSubJobInfo().getSubJobDetail().getId() + ", response : " + BDPJettyServerHelper.gson().toJson(response);
logger.error(msg);
if (null != job) {
@@ -90,7 +96,10 @@
if(StringUtils.isNotBlank(path)) {
EntranceJob job = null;
try {
- job = (EntranceJob) getEntranceContext().getOrCreateExecutorManager().getEntranceJobByExecId(request.getJob().getId()).getOrElse(null);
+ ExecutorManager executorManager = getEntranceContext().getOrCreateScheduler().getSchedulerContext().getOrCreateExecutorManager();
+ if (EntranceExecutorManager.class.isInstance(executorManager)) {
+ job = (EntranceJob) ((EntranceExecutorManager) executorManager).getEntranceJobByExecId(request.getJob().getId()).getOrElse(null);
+ }
} catch (Throwable e) {
try {
entranceContext.getOrCreateLogManager().onLogUpdate(job, "store resultSet failed! reason: " + ExceptionUtils.getRootCauseMessage(e));
diff --git a/linkis-computation-governance/linkis-entrance/src/main/java/com/webank/wedatasphere/linkis/entrance/restful/EntranceRestfulApi.java b/linkis-computation-governance/linkis-entrance/src/main/java/com/webank/wedatasphere/linkis/entrance/restful/EntranceRestfulApi.java
index a7580e5..0a25a10 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/java/com/webank/wedatasphere/linkis/entrance/restful/EntranceRestfulApi.java
+++ b/linkis-computation-governance/linkis-entrance/src/main/java/com/webank/wedatasphere/linkis/entrance/restful/EntranceRestfulApi.java
@@ -28,10 +28,12 @@
import com.webank.wedatasphere.linkis.protocol.engine.JobProgressInfo;
import com.webank.wedatasphere.linkis.protocol.utils.ZuulEntranceUtils;
import com.webank.wedatasphere.linkis.rpc.Sender;
+import com.webank.wedatasphere.linkis.scheduler.listener.LogListener;
import com.webank.wedatasphere.linkis.scheduler.queue.Job;
import com.webank.wedatasphere.linkis.scheduler.queue.SchedulerEventState;
import com.webank.wedatasphere.linkis.server.Message;
import com.webank.wedatasphere.linkis.server.security.SecurityFilter;
+import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.codehaus.jackson.JsonNode;
import org.slf4j.Logger;
@@ -78,6 +80,13 @@
// try{
logger.info("Begin to get an execID");
json.put(TaskConstant.UMUSER, SecurityFilter.getLoginUsername(req));
+ HashMap<String, String> map = (HashMap) json.get(TaskConstant.SOURCE);
+ if(map == null){
+ map = new HashMap<>();
+ json.put(TaskConstant.SOURCE, map);
+ }
+ String ip = JobHistoryHelper.getRequestIpAddr(req);
+ map.put(TaskConstant.REQUEST_IP, ip);
String execID = entranceServer.execute(json);
Job job = entranceServer.getJob(execID).get();
JobRequest jobReq = ((EntranceJob) job).getJobRequest();
@@ -86,7 +95,7 @@
pushLog("************************************SCRIPT CODE************************************", job);
pushLog(jobReq.getExecutionCode(), job);
pushLog("************************************SCRIPT CODE************************************", job);
- pushLog(LogUtils.generateInfo("Your job is accepted, jobID is " + execID + " and taskID is " + taskID + ". Please wait it to be scheduled"), job);
+ pushLog(LogUtils.generateInfo("Your job is accepted, jobID is " + execID + " and taskID is " + taskID + " in " + Sender.getThisServiceInstance().toString() + ". Please wait it to be scheduled"), job);
execID = ZuulEntranceUtils.generateExecID(execID, Sender.getThisServiceInstance().getApplicationName(), new String[]{Sender.getThisInstance()});
message = Message.ok();
message.setMethod("/api/entrance/execute");
@@ -109,6 +118,13 @@
Message message = null;
logger.info("Begin to get an execID");
json.put(TaskConstant.SUBMIT_USER, SecurityFilter.getLoginUsername(req));
+ HashMap<String, String> map = (HashMap) json.get(TaskConstant.SOURCE);
+ if(map == null){
+ map = new HashMap<>();
+ json.put(TaskConstant.SOURCE, map);
+ }
+ String ip = JobHistoryHelper.getRequestIpAddr(req);
+ map.put(TaskConstant.REQUEST_IP, ip);
String execID = entranceServer.execute(json);
Job job = entranceServer.getJob(execID).get();
JobRequest jobRequest = ((EntranceJob) job).getJobRequest();
@@ -117,7 +133,7 @@
pushLog("************************************SCRIPT CODE************************************", job);
pushLog(jobRequest.getExecutionCode(), job);
pushLog("************************************SCRIPT CODE************************************", job);
- pushLog(LogUtils.generateInfo("Your job is accepted, jobID is " + execID + " and taskID is " + taskID + ". Please wait it to be scheduled"), job);
+ pushLog(LogUtils.generateInfo("Your job is accepted, jobID is " + execID + " and taskID is " + taskID + " in " + Sender.getThisServiceInstance().toString() + ". Please wait it to be scheduled"), job);
execID = ZuulEntranceUtils.generateExecID(execID, Sender.getThisServiceInstance().getApplicationName(), new String[]{Sender.getThisInstance()});
message = Message.ok();
message.setMethod("/api/entrance/submit");
@@ -141,8 +157,7 @@
try {
job = entranceServer.getJob(realId);
} catch (Exception e) {
- logger.warn("error occurred while getting task {} status (获取任务 {} 状态时出现错误)",realId, realId, e);
- //如果获取错误了,证明在内存中已经没有了,去jobhistory找寻一下taskID代表的任务的状态,然后返回
+ logger.warn("获取任务 {} 状态时出现错误", realId, e.getMessage());
long realTaskID = Long.parseLong(taskID);
String status = JobHistoryHelper.getStatusByTaskID(realTaskID);
message = Message.ok();
@@ -255,22 +270,26 @@
retLog = sb.toString();
}
} catch (IllegalStateException e) {
- logger.error("Failed to get log information for :{}(为 {} 获取日志失败)", job.get().getId(), job.get().getId(),e);
- message = Message.error("Failed to get log information(获取日志信息失败)");
+ logger.debug("Failed to get log information for :{}(为 {} 获取日志失败)", job.get().getId(), job.get().getId(),e);
+ message = Message.ok();
message.setMethod("/api/entrance/" + id + "/log");
message.data("log", "").data("execID", id).data("fromLine", retFromLine + fromLine);
} catch (final IllegalArgumentException e) {
- logger.error("Failed to get log information for :{}(为 {} 获取日志失败)", job.get().getId(), job.get().getId(),e);
- message = Message.error("Failed to get log information(获取日志信息失败)");
+ logger.debug("Failed to get log information for :{}(为 {} 获取日志失败)", job.get().getId(), job.get().getId(),e);
+ message = Message.ok();
message.setMethod("/api/entrance/" + id + "/log");
message.data("log", "").data("execID", id).data("fromLine", retFromLine + fromLine);
return Message.messageToResponse(message);
} catch (final Exception e1) {
- logger.error("Failed to get log information for :{}(为 {} 获取日志失败)", job.get().getId(), job.get().getId(),e1);
+ logger.debug("Failed to get log information for :{}(为 {} 获取日志失败)", job.get().getId(), job.get().getId(),e1);
message = Message.error("Failed to get log information(获取日志信息失败)");
message.setMethod("/api/entrance/" + id + "/log");
message.data("log", "").data("execID", id).data("fromLine", retFromLine + fromLine);
return Message.messageToResponse(message);
+ } finally {
+ if (null != logReader && job.get().isCompleted()) {
+ IOUtils.closeQuietly(logReader);
+ }
}
message = Message.ok();
message.setMethod("/api/entrance/" + id + "/log");
@@ -285,12 +304,13 @@
@Override
@POST
- @Path("/killJobs")
- public Response killJobs(@Context HttpServletRequest req, JsonNode jsonNode) {
+ @Path("/{id}/killJobs")
+ public Response killJobs(@Context HttpServletRequest req, JsonNode jsonNode, @PathParam("id") String strongExecId) {
JsonNode idNode = jsonNode.get("idList");
JsonNode taskIDNode = jsonNode.get("taskIDList");
+ ArrayList<Long> waitToForceKill = new ArrayList<>();
if(idNode.size() != taskIDNode.size()){
- return Message.messageToResponse(Message.error("he length of the ID list does not match the length of the TASKID list(id列表的长度与taskId列表的长度不一致)"));
+ return Message.messageToResponse(Message.error("The length of the ID list does not match the length of the TASKID list(id列表的长度与taskId列表的长度不一致)"));
}
if(!idNode.isArray() || !taskIDNode.isArray()){
return Message.messageToResponse(Message.error("Request parameter error, please use array(请求参数错误,请使用数组)"));
@@ -305,9 +325,9 @@
try {
job = entranceServer.getJob(realId);
} catch (Exception e) {
- logger.warn("can not find a job in entranceServer, will force to kill it", e);
+ logger.warn("can not find a job in entranceServer, will force to kill it", e.getMessage());
//如果在内存中找不到该任务,那么该任务可能已经完成了,或者就是重启导致的
- JobHistoryHelper.forceKill(taskID);
+ waitToForceKill.add(taskID);
Message message = Message.ok("Forced Kill task (强制杀死任务)");
message.setMethod("/api/entrance/" + id + "/kill");
message.setStatus(0);
@@ -317,7 +337,7 @@
Message message = null;
if (job.isEmpty()) {
logger.warn("can not find a job in entranceServer, will force to kill it");
- JobHistoryHelper.forceKill(taskID);
+ waitToForceKill.add(taskID);
message = Message.ok("Forced Kill task (强制杀死任务)");
message.setMethod("/api/entrance/" + id + "/killJobs");
message.setStatus(0);
@@ -334,7 +354,12 @@
if (job.get() instanceof EntranceJob) {
EntranceJob entranceJob = (EntranceJob) job.get();
JobRequest jobReq = entranceJob.getJobRequest();
- jobReq.setStatus(SchedulerEventState.Cancelled().toString());
+ entranceJob.updateJobRequestStatus(SchedulerEventState.Cancelled().toString());
+ jobReq.setProgress("1.0f");
+ LogListener logListener = entranceJob.getLogListener().getOrElse(null);
+ if (null != logListener) {
+ logListener.onLogUpdate(entranceJob, "Job " + jobReq.getId() + " was kill by user successfully(任务" + jobReq.getId() + "已成功取消)");
+ }
this.entranceServer.getEntranceContext().getOrCreatePersistenceManager().createPersistenceEngine().updateIfNeeded(jobReq);
}
logger.info("end to kill job {} ", job.get().getId());
@@ -347,6 +372,9 @@
}
messages.add(message);
}
+ if(!waitToForceKill.isEmpty()){
+ JobHistoryHelper.forceBatchKill(waitToForceKill);
+ }
return Message.messageToResponse(Message.ok("停止任务成功").data("messages", messages));
}
@@ -389,7 +417,7 @@
if (job.get() instanceof EntranceJob) {
EntranceJob entranceJob = (EntranceJob) job.get();
JobRequest jobReq = entranceJob.getJobRequest();
- jobReq.setStatus(SchedulerEventState.Cancelled().toString());
+ entranceJob.updateJobRequestStatus(SchedulerEventState.Cancelled().toString());
this.entranceServer.getEntranceContext().getOrCreatePersistenceManager().createPersistenceEngine().updateIfNeeded(jobReq);
}
logger.info("end to kill job {} ", job.get().getId());
@@ -411,7 +439,7 @@
Option<Job> job = entranceServer.getJob(realId);
Message message = null;
if (job.isEmpty()) {
- message = Message.error("can not find the job of exexID :" + id +" can not to pause it (不能找到execID: " + id + "对应的job,不能进行pause)");
+ message = Message.error("can not find the job of exexID :" + id +" can not pause (不能找到execID: " + id + "对应的job,不能进行pause)");
message.setMethod("/api/entrance/" + id + "/pause");
message.setStatus(1);
} else {
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/EntranceContext.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/EntranceContext.scala
index 99a957d..164973e 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/EntranceContext.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/EntranceContext.scala
@@ -17,7 +17,6 @@
package com.webank.wedatasphere.linkis.entrance
import com.webank.wedatasphere.linkis.entrance.event._
-import com.webank.wedatasphere.linkis.entrance.execute.EntranceExecutorManager
import com.webank.wedatasphere.linkis.entrance.interceptor.EntranceInterceptor
import com.webank.wedatasphere.linkis.entrance.log.LogManager
import com.webank.wedatasphere.linkis.entrance.persistence.PersistenceManager
@@ -45,6 +44,4 @@
def getOrCreateLogListenerBus: EntranceLogListenerBus[EntranceLogListener, EntranceLogEvent]
-
- def getOrCreateExecutorManager: EntranceExecutorManager
}
\ No newline at end of file
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/EntranceServer.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/EntranceServer.scala
index caeb9e8..4729626 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/EntranceServer.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/EntranceServer.scala
@@ -15,20 +15,14 @@
import com.webank.wedatasphere.linkis.common.exception.{ErrorException, LinkisException, LinkisRuntimeException}
import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
-import com.webank.wedatasphere.linkis.entrance.conf.EntranceConfiguration
-import com.webank.wedatasphere.linkis.entrance.event.{EntranceJobLogEvent, EntranceLogEvent, EntranceLogListener}
import com.webank.wedatasphere.linkis.entrance.exception.{EntranceErrorException, SubmitFailedException}
-import com.webank.wedatasphere.linkis.entrance.execute.EntranceJob
+import com.webank.wedatasphere.linkis.entrance.execute.{EntranceExecutorManager, EntranceJob}
import com.webank.wedatasphere.linkis.entrance.log.LogReader
import com.webank.wedatasphere.linkis.entrance.timeout.JobTimeoutManager
-import com.webank.wedatasphere.linkis.governance.common.constant.job.JobRequestConstants
import com.webank.wedatasphere.linkis.governance.common.entity.job.JobRequest
-import com.webank.wedatasphere.linkis.governance.common.entity.task.RequestPersistTask
-import com.webank.wedatasphere.linkis.governance.common.protocol.job.{JobReqUpdate, JobRespProtocol}
import com.webank.wedatasphere.linkis.rpc.Sender
-import com.webank.wedatasphere.linkis.scheduler.queue.{Job, SchedulerEvent, SchedulerEventState}
+import com.webank.wedatasphere.linkis.scheduler.queue.{Job, SchedulerEventState}
import com.webank.wedatasphere.linkis.server.conf.ServerConfiguration
-import org.apache.commons.lang.StringUtils
import org.apache.commons.lang.exception.ExceptionUtils
@@ -51,7 +45,7 @@
* @return
*/
def execute(params: java.util.Map[String, Any]): String = {
- if(!params.containsKey(EntranceServer.DO_NOT_PRINT_PARAMS_LOG)) info("received a request: " + params)
+ if(!params.containsKey(EntranceServer.DO_NOT_PRINT_PARAMS_LOG)) debug("received a request: " + params)
else params.remove(EntranceServer.DO_NOT_PRINT_PARAMS_LOG)
var jobRequest = getEntranceContext.getOrCreateEntranceParser().parseToTask(params)
// tod multi entrance instances
@@ -63,6 +57,8 @@
if (null == jobRequest.getId || jobRequest.getId <= 0) {
throw new EntranceErrorException(20052, "Persist jobRequest error, please submit again later(存储Job异常,请稍后重新提交任务)")
}
+ info(s"received a request,convert $jobRequest ")
+
val logAppender = new java.lang.StringBuilder()
Utils.tryThrow(getEntranceContext.getOrCreateEntranceInterceptors().foreach(int => jobRequest = int.apply(jobRequest, logAppender))) { t =>
val error = t match {
@@ -84,7 +80,6 @@
error
}
- getEntranceContext.getOrCreatePersistenceManager().createPersistenceEngine().updateIfNeeded(jobRequest)
val job = getEntranceContext.getOrCreateEntranceParser().parseToJob(jobRequest)
Utils.tryThrow{
job.init()
@@ -105,14 +100,21 @@
}
getEntranceContext.getOrCreateScheduler().submit(job)
- logger.info(s"Job with jobId : ${job.getId} and execID : ${job.getId()} submitted ")
+ val msg = s"Job with jobId : ${job.getId} and execID : ${job.getId()} submitted "
+ logger.info(msg)
job match {
case entranceJob: EntranceJob =>
- getEntranceContext.getOrCreateExecutorManager.setJobExecIdAndEntranceJob(entranceJob.getId, entranceJob)
+ getEntranceContext.getOrCreateScheduler().getSchedulerContext.getOrCreateExecutorManager match {
+ case entranceExecutorManager: EntranceExecutorManager =>
+ entranceExecutorManager.setJobExecIdAndEntranceJob(entranceJob.getId, entranceJob)
+ case _ =>
+ }
entranceJob.getJobRequest.setReqId(job.getId())
if(jobTimeoutManager.timeoutCheck && JobTimeoutManager.hasTimeoutLabel(entranceJob)) jobTimeoutManager.add(job.getId(), entranceJob)
+ entranceJob.getLogListener.foreach(_.onLogUpdate(entranceJob, msg))
case _ =>
}
+ getEntranceContext.getOrCreatePersistenceManager().createPersistenceEngine().updateIfNeeded(jobRequest)
job.getId()
}{t =>
job.onFailure("Submitting the query failed!(提交查询失败!)", t)
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/conf/EntranceConfiguration.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/conf/EntranceConfiguration.scala
index 7b0ae44..3c01293 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/conf/EntranceConfiguration.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/conf/EntranceConfiguration.scala
@@ -77,7 +77,7 @@
* wds.linkis.dwc.instance is a parameter used to control the number of engines each user starts.
*wds.linkis.instance 是用来进行控制每个用户启动engine数量的参数
*/
- val WDS_LINKIS_INSTANCE = CommonVars("wds.linkis.instance", 3)
+ val WDS_LINKIS_INSTANCE = CommonVars("wds.linkis.rm.instance", 3)
val LOG_EXCLUDE_ALL = CommonVars("wds.linkis.log.exclude.all", "com.netflix")
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/execute/DefaultEntranceExecutor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/execute/DefaultEntranceExecutor.scala
index a0deb19..91d1d43 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/execute/DefaultEntranceExecutor.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/execute/DefaultEntranceExecutor.scala
@@ -177,15 +177,19 @@
}
case failedResponse: FailedTaskResponse =>
val msg = s"SubJob : ${entranceExecuteRequest.getSubJobInfo.getSubJobDetail.getId} failed to execute task, code : ${failedResponse.getErrorCode}, reason : ${failedResponse.getErrorMsg}."
- info(msg)
- entranceExecuteRequest.getJob.getLogListener.foreach(_.onLogUpdate(entranceExecuteRequest.getJob, LogUtils.generateERROR(msg)))
+ debug(msg)
+ entranceExecuteRequest.getSubJobInfo.setStatus(SchedulerEventState.Failed.toString)
+ entranceExecuteRequest.getJob.getEntranceContext.getOrCreatePersistenceManager().createPersistenceEngine().updateIfNeeded(entranceExecuteRequest.getSubJobInfo)
entranceExecutorManager.clearOrchestrationCache(orchestration)
entranceExecutorManager.clearJobCache(entranceExecuteRequest.getJob.getId)
- Utils.tryAndWarn(doOnFailed(entranceExecuteRequest, orchestration, failedResponse))
+ Utils.tryAndWarn{
+ doOnFailed(entranceExecuteRequest, orchestration, failedResponse)
+ entranceExecuteRequest.getJob.getEntranceContext.getOrCreatePersistenceManager().createPersistenceEngine().updateIfNeeded(entranceExecuteRequest.getJob.getJobRequest)
+ }
case o =>
val msg = s"Job : ${entranceExecuteRequest.getJob.getId} , subJob : ${entranceExecuteRequest.getSubJobInfo.getSubJobDetail.getId} returnd unknown response : ${BDPJettyServerHelper.gson.toJson(o)}"
error(msg)
- entranceExecuteRequest.getJob.getLogListener.foreach(_.onLogUpdate(entranceExecuteRequest.getJob, msg))
+ entranceExecuteRequest.getJob.getLogListener.foreach(_.onLogUpdate(entranceExecuteRequest.getJob, LogUtils.generateERROR(msg)))
// todo
}
}
@@ -218,37 +222,45 @@
private def doOnFailed(entranceExecuteRequest: EntranceExecuteRequest, orchestration: Orchestration, failedResponse: FailedTaskResponse) = {
val msg = failedResponse.getErrorCode + ", " + failedResponse.getErrorMsg
+ val failedMsg = s"Your job execution failed because subjob : ${entranceExecuteRequest.getSubJobInfo.getSubJobDetail.getId} failed, the reason: ${msg}"
if (null != orchestration) {
val jobReturn = entranceExecutorManager.getAsyncJobReturnByOrchestration(orchestration).getOrElse {
- error(s"Cannot get jobReturn for orchestration : ${orchestration.toString}")
+ debug(s"Cannot get jobReturn for orchestration : ${orchestration.toString}")
null
}
if (null != jobReturn) {
jobReturn.notifyError(msg, failedResponse.getCause)
jobReturn.notifyStatus(ResponseTaskStatus(jobReturn.subJobId, ExecutionNodeStatus.Failed))
+ jobReturn.request match {
+ case entranceExecuteRequest: EntranceExecuteRequest =>
+ entranceExecuteRequest.getJob.getLogListener.foreach(_.onLogUpdate(entranceExecuteRequest.getJob, failedMsg))
}
}
- entranceExecuteRequest.getJob.error(msg, failedResponse.getCause)
- val failedMsg = s"Your job failed because subjob : ${entranceExecuteRequest.getSubJobInfo.getSubJobDetail.getId} failed, ${msg}"
+ }
entranceExecuteRequest.getJob.transitionCompleted(ErrorExecuteResponse(failedResponse.getErrorMsg, failedResponse.getCause), failedMsg)
entranceExecuteRequest.getJob.onFailure(msg, failedResponse.getCause)
}
override def kill(execId: String): Boolean = {
- info(s"start to kill job $execId")
+ warn(s"start to kill job $execId")
Utils.tryAndWarn {
- engineReturns.find(_.getJobId.contains(execId)).exists(e => {
val msg = s"You job with id ${execId} was cancelled by user!"
- e.orchestrationFuture.cancel(msg)
- e.notifyError(msg)
- killExecId(e, e.subJobId)
+ val runningJobFutureOption = getRunningOrchestrationFuture
+ if (None != runningJobFutureOption && null != runningJobFutureOption.get) {
+ runningJobFutureOption.get.cancel(msg)
+ } else {
+ error(s"Invalid jobFuture while cancelling job ${execId}.")
+ }
val job = entranceExecutorManager.getEntranceJobByExecId(execId).getOrElse(null)
if (null != job) {
+ job.updateJobRequestStatus(SchedulerEventState.Cancelled.toString)
job.getLogListener.foreach(_.onLogUpdate(job, msg))
job.transitionCompleted(ErrorExecuteResponse(msg, null), msg)
- }
true
- })
+ } else {
+ warn(s"Cannot get job by execId : ${execId}, will not kill.")
+ false
+ }
}
}
@@ -289,8 +301,6 @@
// 1. create JobReq
val compJobReq = requestToComputationJobReq(entranceExecuteRequest)
Utils.tryCatch {
- entranceExecuteRequest.getJob.getLogListener.foreach(_.onLogUpdate(entranceExecuteRequest.getJob, s"One subJob has been submitted."))
- // entranceExecuteRequest.getJob.getLogListener.foreach(_.onLogUpdate(entranceExecuteRequest.getJob, s"code: >> ${entranceExecuteRequest.code()}"))
// 2. orchestrate compJobReq get Orchestration
val orchestration = EntranceOrchestrationFactory.getOrchestrationSession().orchestrate(compJobReq)
// val planMsg = orchestration.explain(true)
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/execute/EntranceExecutor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/execute/EntranceExecutor.scala
index 8fb2606..f85c2b9 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/execute/EntranceExecutor.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/execute/EntranceExecutor.scala
@@ -85,7 +85,7 @@
override def toString: String = s"string"
protected def killExecId(asynReturn: EngineExecuteAsynReturn, subJobId: String): Boolean = {
- info(s"begin to send killExecId, execID: $subJobId")
+ info(s"begin to send killExecId, subJobId : $subJobId")
Utils.tryCatch {
asynReturn.orchestrationFuture.cancel(s"Job ${subJobId} was cancelled by user.")
true
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/execute/EntranceJob.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/execute/EntranceJob.scala
index 6b2d55e..b846bcf 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/execute/EntranceJob.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/execute/EntranceJob.scala
@@ -37,6 +37,7 @@
import com.webank.wedatasphere.linkis.scheduler.executer.{CompletedExecuteResponse, ErrorExecuteResponse, SuccessExecuteResponse}
import com.webank.wedatasphere.linkis.scheduler.queue.SchedulerEventState._
import com.webank.wedatasphere.linkis.scheduler.queue.{Job, SchedulerEventState}
+import org.apache.commons.lang.StringUtils
import scala.beans.BeanProperty
@@ -143,7 +144,7 @@
setEngineInstance(jobRequest)
}*/
- getJobRequest.setStatus(toState.toString)
+ updateJobRequestStatus(toState.toString)
super.afterStateChanged(fromState, toState)
toState match {
case Scheduled =>
@@ -183,10 +184,19 @@
}
override def onFailure(errorMsg: String, t: Throwable): Unit = {
- getJobRequest.setStatus(SchedulerEventState.Failed.toString)
- getJobRequest.setErrorDesc(errorMsg)
+ updateJobRequestStatus(SchedulerEventState.Failed.toString)
+ val generatedMsg = LogUtils.generateERROR(s"Sorry, your job executed failed with reason: $errorMsg")
this.entranceLogListenerBus.foreach(_.post(
- EntrancePushLogEvent(this, LogUtils.generateERROR(s"Sorry, your job executed failed with reason: $errorMsg"))))
+ EntrancePushLogEvent(this, generatedMsg)))
+ this.getLogListener.foreach(_.onLogUpdate(this, generatedMsg))
+// transitionCompleted() // todo
+ if (StringUtils.isBlank(getJobRequest.getErrorDesc)) {
+ if (null != t) {
+ getJobRequest.setErrorDesc(errorMsg + " " + t.getMessage)
+ } else {
+ getJobRequest.setErrorDesc(errorMsg)
+ }
+ }
super.onFailure(errorMsg, t)
}
@@ -194,9 +204,9 @@
executeCompleted match {
case error: ErrorExecuteResponse => // todo checkif RPCUtils.isReceiverNotExists(error.t) =>
entranceListenerBus.foreach(_.post(MissingEngineNotifyEvent(this, error.t, getExecutor)))
- getJobRequest.setStatus(SchedulerEventState.Failed.toString)
+ updateJobRequestStatus(SchedulerEventState.Failed.toString)
case _ : SuccessExecuteResponse =>
- getJobRequest.setStatus(SchedulerEventState.Succeed.toString)
+ updateJobRequestStatus(SchedulerEventState.Succeed.toString)
getJobRequest.setErrorCode(0)
getJobRequest.setErrorDesc(null)
case _ =>
@@ -211,7 +221,7 @@
}
def transitionCompleted(executeCompleted: CompletedExecuteResponse, reason: String): Unit = {
- info("Job directly completed with reason: " + reason)
+ debug("Job directly completed with reason: " + reason)
transitionCompleted(executeCompleted)
}
@@ -234,6 +244,41 @@
}
}
+
+ /*
+ Update old status of internal jobRequest.
+ if old status is complete, will not update the status
+ if index of new status is bigger then old status, then update the old status to new status
+ */
+ def updateJobRequestStatus(newStatus: String): Unit = {
+ val oriStatus = {
+ if (StringUtils.isNotBlank(getJobRequest.getStatus)) {
+ SchedulerEventState.withName(getJobRequest.getStatus)
+ } else {
+ SchedulerEventState.Inited
+ }
+ }
+ if (StringUtils.isNotBlank(newStatus)) {
+ Utils.tryCatch{
+ val tmpStatus = SchedulerEventState.withName(newStatus)
+ if (SchedulerEventState.isCompleted(oriStatus) && !SchedulerEventState.Cancelled.equals(tmpStatus)) {
+ warn(s"Job ${getJobRequest.getId} status : ${getJobRequest.getStatus} is completed, will not change to : $newStatus")
+ return
+ }
+ if (tmpStatus.id > oriStatus.id) {
+ getJobRequest.setStatus(tmpStatus.toString)
+ } else {
+ warn(s"Job ${getJobRequest.getId} 's index of status : ${oriStatus.toString} is not smaller then new status : ${newStatus}, will not change status.")
+ }
+ } {
+ case e: Exception =>
+ error(s"Invalid job status : ${newStatus}, ${e.getMessage}")
+ return
+ }
+ } else {
+ error("Invalid job status : null")
+ }
+ }
}
object EntranceJob {
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/interceptor/OnceJobInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/interceptor/OnceJobInterceptor.scala
new file mode 100644
index 0000000..e051ed1
--- /dev/null
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/interceptor/OnceJobInterceptor.scala
@@ -0,0 +1,92 @@
+/*
+ * Copyright 2019 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.linkis.entrance.interceptor
+import java.io.ByteArrayInputStream
+import java.{lang, util}
+
+import com.webank.wedatasphere.linkis.bml.client.BmlClientFactory
+import com.webank.wedatasphere.linkis.common.utils.Utils
+import com.webank.wedatasphere.linkis.governance.common.entity.job.{JobRequest, OnceExecutorContent}
+import com.webank.wedatasphere.linkis.governance.common.utils.OnceExecutorContentUtils
+import com.webank.wedatasphere.linkis.governance.common.utils.OnceExecutorContentUtils.BmlResource
+import com.webank.wedatasphere.linkis.manager.label.builder.factory.LabelBuilderFactoryContext
+import com.webank.wedatasphere.linkis.manager.label.entity.JobLabel
+import com.webank.wedatasphere.linkis.manager.label.entity.engine.EngineConnMode._
+import com.webank.wedatasphere.linkis.manager.label.entity.engine.{CodeLanguageLabel, EngineConnModeLabel}
+import com.webank.wedatasphere.linkis.protocol.constants.TaskConstant
+import com.webank.wedatasphere.linkis.protocol.utils.TaskUtils
+import com.webank.wedatasphere.linkis.server.BDPJettyServerHelper
+
+import scala.collection.convert.WrapAsJava._
+import scala.collection.convert.WrapAsScala._
+
+/**
+ * Created by enjoyyin on 2021/5/27.
+ */
+class OnceJobInterceptor extends EntranceInterceptor {
+
+ private val onceModes = Array(Once_With_Cluster.toString, Once.toString, Computation_With_Once.toString)
+ private val bmlClient = BmlClientFactory.createBmlClient(Utils.getJvmUser)
+
+ /**
+ * The apply function is to supplement the information of the incoming parameter task, making the content of this task more complete.
+ * * Additional information includes: database information supplement, custom variable substitution, code check, limit limit, etc.
+ * apply函数是对传入参数task进行信息的补充,使得这个task的内容更加完整。
+ * 补充的信息包括: 数据库信息补充、自定义变量替换、代码检查、limit限制等
+ *
+ * @param task
+ * @param logAppender Used to cache the necessary reminder logs and pass them to the upper layer(用于缓存必要的提醒日志,传给上层)
+ * @return
+ */
+ override def apply(task: JobRequest, logAppender: lang.StringBuilder): JobRequest = {
+ val existsOnceLabel = task.getLabels.exists {
+ case e: EngineConnModeLabel => onceModes.contains(e.getEngineConnMode)
+ case _ => false
+ }
+ if(!existsOnceLabel) return task
+ val jobLabel = LabelBuilderFactoryContext.getLabelBuilderFactory.createLabel(classOf[JobLabel])
+ jobLabel.setJobId(task.getId.toString)
+ task.getLabels.add(jobLabel)
+ val onceExecutorContent = new OnceExecutorContent
+ val params = task.getParams.map{case (k, v) => k -> v.asInstanceOf[Any]}
+ implicit def toMap(map: util.Map[String, Any]): util.Map[String, Object] = map.map { case (k, v) => k -> v.asInstanceOf[Object]}
+ onceExecutorContent.setSourceMap(task.getSource.map { case (k, v) => k -> v.asInstanceOf[Object]})
+ onceExecutorContent.setVariableMap(TaskUtils.getVariableMap(params))
+ onceExecutorContent.setRuntimeMap(TaskUtils.getRuntimeMap(params))
+ onceExecutorContent.setJobContent(getJobContent(task))
+ onceExecutorContent.setExtraLabels(new util.HashMap[String, Object]) //TODO Set it if needed
+ val contentMap = OnceExecutorContentUtils.contentToMap(onceExecutorContent)
+ val bytes = BDPJettyServerHelper.jacksonJson.writeValueAsBytes(contentMap)
+ val response = bmlClient.uploadResource(task.getExecuteUser, getFilePath(task), new ByteArrayInputStream(bytes))
+ val value = OnceExecutorContentUtils.resourceToValue(BmlResource(response.resourceId, response.version))
+ TaskUtils.addStartupMap(params,
+ Map(OnceExecutorContentUtils.ONCE_EXECUTOR_CONTENT_KEY -> value.asInstanceOf[Any]))
+ task
+ }
+
+ protected def getFilePath(task: JobRequest): String = s"/tmp/${task.getExecuteUser}/${task.getId}"
+
+ protected def getJobContent(task: JobRequest): util.Map[String, Object] = {
+ //TODO Wait for optimizing since the class `JobRequest` is waiting for optimizing .
+ val jobContent = new util.HashMap[String, Object]
+ jobContent.put(TaskConstant.CODE, task.getExecutionCode)
+ task.getLabels.foreach {
+ case label: CodeLanguageLabel =>
+ jobContent.put(TaskConstant.RUNTYPE, label.getCodeType)
+ case _ =>
+ }
+ jobContent
+ }
+
+}
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/interceptor/impl/CSEntranceInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/interceptor/impl/CSEntranceInterceptor.scala
index f15921b..e6bd587 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/interceptor/impl/CSEntranceInterceptor.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/interceptor/impl/CSEntranceInterceptor.scala
@@ -25,11 +25,11 @@
class CSEntranceInterceptor extends EntranceInterceptor with Logging {
override def apply(task: JobRequest, logAppender: lang.StringBuilder): JobRequest = {
- logger.info("Start to execute CSEntranceInterceptor")
+ logger.debug("Start to execute CSEntranceInterceptor")
Utils.tryAndWarn(CSEntranceHelper.addCSVariable(task))
Utils.tryAndWarn(CSEntranceHelper.resetCreator(task))
Utils.tryAndWarn(CSEntranceHelper.initNodeCSInfo(task))
- logger.info("Finished to execute CSEntranceInterceptor")
+ logger.debug("Finished to execute CSEntranceInterceptor")
task
}
}
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/interceptor/impl/CustomVariableUtils.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/interceptor/impl/CustomVariableUtils.scala
index 096fe16..c689480 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/interceptor/impl/CustomVariableUtils.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/interceptor/impl/CustomVariableUtils.scala
@@ -250,7 +250,9 @@
else
throw VarSubstitutionException(20040, "please use correct date format,example:run_date=20170101")
}
- case _ => if ((allCatch opt value.toDouble).isDefined) {
+ case _ => /*if ((allCatch opt value.toLong).isDefined) {
+ nameAndType(name) = LongType(value.toLong)
+ } else*/ if ((allCatch opt value.toDouble).isDefined) {
nameAndType(name) = DoubleValue(value.toDouble)
} else {
nameAndType(name) = StringType(value)
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/interceptor/impl/Explain.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/interceptor/impl/Explain.scala
index 6ecaba9..aa6b08c 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/interceptor/impl/Explain.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/interceptor/impl/Explain.scala
@@ -109,7 +109,7 @@
case e:Exception => logger.warn("sql limit check error happens")
executionCode.contains(IDE_ALLOW_NO_LIMIT)
}
- if (isNoLimitAllowed) logAppender.append(LogUtils.generateWarn("please attention ,SQL full export mode opens(请注意,SQL全量导出模式打开)\n"))
+ if (isNoLimitAllowed) logAppender.append(LogUtils.generateWarn("please pay attention ,SQL full export mode opened(请注意,SQL全量导出模式打开)\n"))
if(tempCode.contains("""\;""")){
val semicolonIndexes = findRealSemicolonIndex(tempCode)
var oldIndex = 0
@@ -180,8 +180,8 @@
}
//如果一段sql是 --xxx回车select * from default.users,那么他也是select语句
val realCode = cleanComment(code)
- val tmpRealCode = realCode.trim.split("\\s+")(0)
- tmpRealCode.equalsIgnoreCase("select") || tmpRealCode.equalsIgnoreCase("select*")
+ // 以前,在判断,对于select* from xxx这样的SQL时会出现问题的,但是这种语法hive是支持的
+ realCode.trim.split("\\s+")(0).toLowerCase.contains("select")
}
def continueWhenError = false
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/interceptor/impl/LabelCheckInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/interceptor/impl/LabelCheckInterceptor.scala
index e203001..9041b17 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/interceptor/impl/LabelCheckInterceptor.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/interceptor/impl/LabelCheckInterceptor.scala
@@ -48,7 +48,7 @@
case requestPersistTask: JobRequest =>
val labels = requestPersistTask.getLabels
checkEngineTypeLabel(labels)
- checkUserCreatorLabel(labels)
+ checkUserCreatorLabel(labels, jobRequest.getSubmitUser, jobRequest.getExecuteUser)
jobRequest
case _ => jobRequest
}
@@ -66,12 +66,17 @@
}
- private def checkUserCreatorLabel(labels: java.util.List[Label[_]]): Unit = {
+ private def checkUserCreatorLabel(labels: java.util.List[Label[_]], submitUser: String, executeUser: String): Unit = {
val userCreatorLabelOption = labels.find(_.isInstanceOf[UserCreatorLabel])
if (userCreatorLabelOption.isDefined) {
val userCreator = userCreatorLabelOption.get.asInstanceOf[UserCreatorLabel]
if (StringUtils.isNotBlank(userCreator.getUser)) {
+ val userInLabel = userCreator.getUser
+ if (userInLabel.equalsIgnoreCase(executeUser) && userInLabel.equalsIgnoreCase(submitUser)) {
return
+ } else {
+ throw LabelCheckException(50080, s"SubmitUser : ${submitUser} must be the same as ExecuteUser : ${executeUser} , and user : ${userInLabel} in userCreatorLabel.")
+ }
}
}
throw LabelCheckException(50079, "UserCreatorLabel must be need")
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/interceptor/impl/ShellDangerousGrammerInterceptor.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/interceptor/impl/ShellDangerousGrammerInterceptor.scala
index bc1df1c..644614e 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/interceptor/impl/ShellDangerousGrammerInterceptor.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/interceptor/impl/ShellDangerousGrammerInterceptor.scala
@@ -90,6 +90,13 @@
* @return
*/
override def apply(jobRequest: JobRequest, logAppender: lang.StringBuilder): JobRequest = {
+ val codeType = LabelUtil.getCodeType(jobRequest.getLabels)
+ val engineType = LabelUtil.getEngineType(jobRequest.getLabels)
+ // todo check enum equals
+ if (CodeType.Shell.equals(CodeType.getType(codeType))
+ || EngineType.SHELL.equals(EngineType.mapStringToEngineType(engineType))) {
+ info(s"GET REQUEST CODE_TYPE ${codeType} and ENGINE_TYPE ${EngineType}")
jobRequest
+ } else jobRequest
}
}
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/log/CacheLogReader.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/log/CacheLogReader.scala
index 9412f64..03b7090 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/log/CacheLogReader.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/log/CacheLogReader.scala
@@ -38,6 +38,9 @@
var fileSystem:Fs = _
+ var closed = false
+
+
private def createInputStream: InputStream = {
if (fileSystem == null) this synchronized {
if (fileSystem == null){
@@ -104,6 +107,7 @@
})
fileSystem = null
}
+ closed = true
}
}
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/log/ErrorCodeManager.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/log/ErrorCodeManager.scala
index af6fab9..97f2bf9 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/log/ErrorCodeManager.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/log/ErrorCodeManager.scala
@@ -46,212 +46,7 @@
}
}
-abstract class FileErrorCodeManager extends ErrorCodeManager with Logging {
- override def getErrorCodes: Array[ErrorCode] = (new ArrayBuffer[ErrorCode]() ++= getCommonErrorCodes ++= getErrorCodesDynamic).toArray
- val errorCodeFileDir: String = {
- val specifiedDir = EntranceConfiguration.ERROR_CODE_FILE_DIR.getValue
- if (specifiedDir.endsWith("/")) specifiedDir else specifiedDir + "/"
- }
-
- val errorCodeFile: String
- private val user = EntranceConfiguration.ENTRANCE_USER.getValue
- private val fileSystem = FSFactory.getFs(new FsPath(errorCodeFileDir))
-
- @PostConstruct
- def init(): Unit = {
- Utils.tryAndWarn(fileSystem.init(new util.HashMap[String, String]()))
- }
-
- val dynamicErrorCodes: ArrayBuffer[ErrorCode] = new ArrayBuffer[ErrorCode]()
-
- Utils.defaultScheduler.scheduleAtFixedRate(new Runnable {
- override def run(): Unit = {
- logger.info("start to get error code properties from {}", errorCodeFile)
- dynamicErrorCodes.clear()
- val bufferedReader = new BufferedReader(new InputStreamReader(fileSystem.read(new FsPath(errorCodeFile)), "utf-8"))
- var line: String = null
- while ( {
- line = bufferedReader.readLine(); line != null
- }) {
- val arr = line.split(",")
- if (arr.length < 3){
- logger.warn("errorcode line {} format is not correct", line)
- }else{
- dynamicErrorCodes += ErrorCode(arr(0).r.unanchored, arr(1), arr(2))
- }
- }
- }
- }, 0, 1,TimeUnit.HOURS)
-
-
- def getErrorCodesDynamic:Array[ErrorCode] = dynamicErrorCodes.toArray
-
- def getCommonErrorCodes:Array[ErrorCode] = {
- Array(ErrorCode("queue (\\S+) is not exists in YARN".r.unanchored, EntranceErrorConstants.QUEUE_NOT_EXIST,
- "会话创建失败,%s队列不存在,请检查队列设置是否正确"),
- ErrorCode("User (\\S+) cannot submit applications to queue (\\S+)".r.unanchored,
- EntranceErrorConstants.USER_PERMISSION_NOT_ALLOW,
- "会话创建失败,用户%s不能提交应用到队列:%s,请检查队列设置是否正确"),
- ErrorCode(("您本次向任务队列([a-zA-Z_0-9\\.]+)请求资源((.+)),任务队列最大可用资源(.+),任务队列剩余可用资源((.+))您已占用任务队列资源(" +
- ".+)").r.unanchored, EntranceErrorConstants.USER_RESOURCE_EXHAUSTION, "Session创建失败,当前申请资源%s,队列可用资源%s,请检查资源配置是否合理"),
- ErrorCode(("远程服务器没有足够资源实例化[a-zA-Z]+ " +
- "Session,通常是由于您设置【驱动内存】或【客户端内存】过高导致的,建议kill脚本,调低参数后重新提交!等待下次调度...").r.unanchored, EntranceErrorConstants.YARN_RESOURCE_EXHAUSTION, "Session创建失败,服务器资源不足,请稍后再试"),
- ErrorCode(("request resources from ResourceManager has reached 560++ tries, give up and mark" +
- " it as FAILED.").r.unanchored, EntranceErrorConstants.JOB_COMMIT_EXCEED_MAX_TIME, "Session创建失败,队列资源不足,请稍后再试"),
- ErrorCode("Caused by:\\s*java.io.FileNotFoundException".r.unanchored,
- EntranceErrorConstants.FILE_NOT_EXIST, "文件%s不存在"),
- ErrorCode("OutOfMemoryError".r.unanchored,EntranceErrorConstants.OUT_OF_MEMORY,
- "Java进程内存溢出"),
- ErrorCode(("Permission denied:\\s*user=[a-zA-Z0-9_]+,\\s*access=[A-Z]+\\s*,\\s*inode=\"" +
- "([a-zA-Z0-9/_\\.]+)\"").r.unanchored, EntranceErrorConstants.PERMISSION_DENIED,
- "%s无权限访问,请申请开通数据表权限"),
- ErrorCode("Database '([a-zA-Z_0-9]+)' not found".r.unanchored, EntranceErrorConstants.DATABASE_NOT_FOUND,
- "数据库%s不存在,请检查引用的数据库是否有误"),
- ErrorCode("Database does not exist: ([a-zA-Z_0-9]+)".r.unanchored, EntranceErrorConstants.DATABASE_NOT_FOUND,
- "数据库%s不存在,请检查引用的数据库是否有误"),
- ErrorCode("Table or view not found: ([`\\.a-zA-Z_0-9]+)".r.unanchored,
- EntranceErrorConstants.TABLE_NOT_FOUND,
- "表%s不存在,请检查引用的表是否有误"),
- ErrorCode("Table not found '([a-zA-Z_0-9]+)'".r.unanchored, EntranceErrorConstants.TABLE_NOT_FOUND,
- "表%s不存在,请检查引用的表是否有误"),
- ErrorCode("cannot resolve '`(.+)`' given input columns".r.unanchored,
- EntranceErrorConstants.FIELD_NOT_FOUND,
- "字段%s不存在,请检查引用的字段是否有误"),
- ErrorCode(" Invalid table alias or column reference '(.+)':".r.unanchored, EntranceErrorConstants.FIELD_NOT_FOUND, "字段%s不存在,请检查引用的字段是否有误"),
- ErrorCode("([a-zA-Z_0-9]+) is not a valid partition column in table ([`\\.a-zA-Z_0-9]+)".r
- .unanchored, EntranceErrorConstants.PARTITION_FIELD_NOT_FOUND, "分区字段%s不存在,请检查引用的表%s是否为分区表或分区字段有误"),
- ErrorCode("Partition spec \\{(\\S+)\\} contains non-partition columns".r.unanchored,
- EntranceErrorConstants.PARTITION_FIELD_NOT_FOUND,
- "分区字段%s不存在,请检查引用的表是否为分区表或分区字段有误"),
- ErrorCode("table is not partitioned but partition spec exists:\\{(.+)\\}".r.unanchored,
- EntranceErrorConstants.PARTITION_FIELD_NOT_FOUND,
- "分区字段%s不存在,请检查引用的表是否为分区表或分区字段有误"),
- ErrorCode("extraneous input '\\)'".r.unanchored, EntranceErrorConstants.BRACKETS_NOT_MATCH,
- "括号不匹配,请检查代码中括号是否前后匹配"),
- ErrorCode("missing EOF at '\\)'".r.unanchored, EntranceErrorConstants.BRACKETS_NOT_MATCH,
- "括号不匹配,请检查代码中括号是否前后匹配"),
- ErrorCode("expression '(\\S+)' is neither present in the group by".r.unanchored,
- EntranceErrorConstants.GROUP_BY_ERROR,
- "非聚合函数%s必须写在group by中,请检查代码的group by语法"),
- ErrorCode(("grouping expressions sequence is empty,\\s?and '(\\S+)' is not an aggregate " +
- "function").r.unanchored, EntranceErrorConstants.GROUP_BY_ERROR, "非聚合函数%s必须写在group by中,请检查代码的group by语法"),
- ErrorCode("Expression not in GROUP BY key '(\\S+)'".r.unanchored, EntranceErrorConstants.GROUP_BY_ERROR,
- "非聚合函数%s必须写在group " +
- "by中,请检查代码的group by语法"),
- ErrorCode("Undefined function: '(\\S+)'".r.unanchored, EntranceErrorConstants.FUNCTION_UNFOUND_ERROR,
- "未知函数%s,请检查代码中引用的函数是否有误"),
- ErrorCode("Invalid function '(\\S+)'".r.unanchored, EntranceErrorConstants.FUNCTION_UNFOUND_ERROR,
- "未知函数%s,请检查代码中引用的函数是否有误"),
- ErrorCode("Reference '(\\S+)' is ambiguous".r.unanchored, EntranceErrorConstants.FIELD_NAME_CONFLICT,
- "字段%s存在名字冲突,请检查子查询内是否有同名字段"),
- ErrorCode("Ambiguous column Reference '(\\S+)' in subquery".r.unanchored, EntranceErrorConstants.FIELD_NAME_CONFLICT, "字段%s存在名字冲突,请检查子查询内是否有同名字段"),
- ErrorCode("Column '(\\S+)' Found in more than One Tables/Subqueries".r.unanchored,
- EntranceErrorConstants.COLUMN_ERROR,
- "字段%s必须指定表或者子查询别名,请检查该字段来源"),
- ErrorCode("Table or view '(\\S+)' already exists in database '(\\S+)'".r.unanchored,
- EntranceErrorConstants.TABLE_EXIST,
- "表%s在数据库%s中已经存在,请删除相应表后重试"),
- ErrorCode("Table (\\S+) already exists".r.unanchored,EntranceErrorConstants.TABLE_EXIST, "表%s在数据库中已经存在,请删除相应表后重试"),
- ErrorCode("Table already exists".r.unanchored, EntranceErrorConstants.TABLE_EXIST, "表%s在数据库中已经存在,请删除相应表后重试"),
- ErrorCode("""AnalysisException: (\S+) already exists""".r.unanchored, EntranceErrorConstants.TABLE_EXIST, "表%s在数据库中已经存在,请删除相应表后重试"),
- ErrorCode("java.io.FileNotFoundException: (\\S+) \\(No such file or directory\\)".r
- .unanchored,EntranceErrorConstants.FILE_NOT_FOUND_EXCEPTION,"找不到导入文件地址:%s"),
- ErrorCode(("java.io.IOException: Permission denied(.+)at org.apache.poi.xssf.streaming" +
- ".SXSSFWorkbook.createAndRegisterSXSSFSheet").r.unanchored,EntranceErrorConstants.EXPORT_PERMISSION_ERROR,
- "导出为excel时临时文件目录权限异常"),
- ErrorCode("java.io.IOException: Mkdirs failed to create (\\S+) (.+)".r.unanchored,
- EntranceErrorConstants.EXPORT_CREATE_DIR_ERROR,
- "导出文件时无法创建目录:%s"),
- ErrorCode("""ImportError: No module named (\S+)""".r.unanchored,EntranceErrorConstants.IMPORT_ERROR ,
- "导入模块错误,系统没有%s模块,请联系运维人员安装"),
- ErrorCode(
- """requires that the data to be inserted have the same number of columns as the
- |target table""".r.unanchored, EntranceErrorConstants.NUMBER_NOT_MATCH,
- "插入目标表字段数量不匹配,请检查代码!"),
- ErrorCode("""missing \) at '(\S+)' near '<EOF>'""".r.unanchored, EntranceErrorConstants.MISSING_BRACKETS,
- "%s处括号不匹配,请检查代码!"),
- ErrorCode("""due to data type mismatch: differing types in""".r.unanchored,
- EntranceErrorConstants.TYPE_NOT_MATCH, "数据类型不匹配,请检查代码!"),
- ErrorCode("""Invalid column reference (\S+)""".r.unanchored, EntranceErrorConstants.FIELD_REF_ERROR,
- "字段%s引用有误,请检查字段是否存在!"),
- ErrorCode("""Can't extract value from (\S+): need struct type but got string""".r
- .unanchored, EntranceErrorConstants.FIELD_EXTRACT_ERROR, "字段%s提取数据失败"),
- ErrorCode("""mismatched input '(\S+)' expecting""".r.unanchored, EntranceErrorConstants.INPUT_MISSING_MATCH,
- "括号或者关键字不匹配,请检查代码!"),
- ErrorCode("""GROUP BY position (\S+) is not in select list""".r.unanchored,
- EntranceErrorConstants.GROUPBY_MISMATCH_ERROR, "group by " +
- "位置2不在select列表中,请检查代码!"),
- ErrorCode("""'NoneType' object""".r.unanchored, EntranceErrorConstants.NONE_TYPE_ERROR,
- "代码中存在NoneType空类型变量,请检查代码"),
- ErrorCode("""IndexError:List index out of range""".r.unanchored, EntranceErrorConstants.INDEX_OUT_OF_RANGE,
- "数组越界"),
- ErrorCode("""Can't extract value from (\S+): need struct type but got string""".r.unanchored, EntranceErrorConstants.FIELD_EXTRACT_ERROR, "字段提取数据失败请检查字段类型"),
- ErrorCode(
- """Cannot insert into target table because column number/types are different '
- |(\S+)'""".r.unanchored, EntranceErrorConstants.NUMBER_TYPE_DIFF_ERROR, "插入数据未指定目标表字段%s,请检查代码!"),
- ErrorCode("""Invalid table alias '(\S+)'""".r.unanchored,EntranceErrorConstants.INVALID_TABLE,
- "表别名%s错误,请检查代码!"),
- ErrorCode("""UDFArgumentException Argument expected""".r.unanchored, EntranceErrorConstants
- .UDF_ARG_ERROR, "UDF函数未指定参数,请检查代码!"),
- ErrorCode("""aggregate functions are not allowed in GROUP BY""".r.unanchored,
- EntranceErrorConstants.AGG_FUNCTION_ERROR,
- "聚合函数%s不能写在group by 中,请检查代码!"),
- ErrorCode("SyntaxError".r.unanchored,EntranceErrorConstants.SYNTAX_ERROR , "您的代码有语法错误,请您修改代码之后执行"),
- ErrorCode("""Table not found""".r.unanchored, EntranceErrorConstants.TABLE_NOT_FOUND, "表不存在,请检查引用的表是否有误"),
- ErrorCode("""No matching method""".r.unanchored, EntranceErrorConstants.FIELD_NOT_FOUND,
- "函数使用错误,请检查您使用的函数方式"),
- ErrorCode("""is killed by user""".r.unanchored, EntranceErrorConstants.USER_KILL_JOB, "用户主动kill任务"),
- ErrorCode("""name '(\S+)' is not defined""".r.unanchored,EntranceErrorConstants.PY_VAL_NOT_DEF,
- "python代码变量%s未定义"),
- ErrorCode("""Undefined function:\s+'(\S+)'""".r.unanchored, EntranceErrorConstants.PY_UDF_NOT_DEF,
- "python udf %s 未定义"),
- ErrorCode("FAILED: ParseException".r.unanchored, EntranceErrorConstants.SQL_ERROR,
- "您的sql代码可能有语法错误,请检查sql代码"),
- ErrorCode("org.apache.spark.sql.catalyst.parser.ParseException".r.unanchored,
- EntranceErrorConstants.SQL_ERROR,
- "您的sql代码可能有语法错误,请检查sql代码"),
- ErrorCode("""ParseException:""".r.unanchored,EntranceErrorConstants.PARSE_ERROR, "脚本语法有误"),
- ErrorCode("""Permission denied""".r.unanchored, EntranceErrorConstants.PERMISSION_DENIED_ERROR, "您可能没有相关权限"),
- ErrorCode("""cannot concatenate '(\S+)' and '(\S+)'""".r.unanchored, EntranceErrorConstants
- .CANNOT_CONCAT,
- "python执行不能将%s和%s两种类型进行连接"),
- ErrorCode("""Py4JJavaError: An error occurred""".r.unanchored, EntranceErrorConstants.PY4JJAVA_ERROR,
- "pyspark执行失败,可能是语法错误或stage失败"),
- ErrorCode("""unexpected indent""".r.unanchored, EntranceErrorConstants.UNEXPECT_INDENT, "python代码缩进对齐有误"),
- ErrorCode("""is exceeded""".r.unanchored, EntranceErrorConstants.EXCEED, "个人库超过限制"),
- ErrorCode("""unexpected indent""".r.unanchored, EntranceErrorConstants.UNEXPECT_INDENT, "python代码缩进有误"),
- ErrorCode("""unexpected character after line""".r.unanchored, EntranceErrorConstants
- .UNEXPECT_CHARACTER, "python代码反斜杠后面必须换行"),
- ErrorCode("""Invalid row number""".r.unanchored, EntranceErrorConstants.INVALID_ROW_NUMBER,
- "导出Excel表超过最大限制1048575"),
- ErrorCode("""parquet.io.ParquetDecodingException""".r.unanchored,EntranceErrorConstants.PARQUET_DECODE_ERROR ,
- "python save as " +
- "table未指定格式,默认用parquet保存,hive查询报错"),
- ErrorCode("""errCode: 11011""".r.unanchored, EntranceErrorConstants.MEM_EXHAUST, "远程服务器内存资源不足"),
- ErrorCode("""errCode: 11012""".r.unanchored, EntranceErrorConstants.CPU_EXHAUST, "远程服务器CPU资源不足"),
- ErrorCode("""errCode: 11013""".r.unanchored, EntranceErrorConstants.SERVER_EXHAUST, "远程服务器实例资源不足"),
- ErrorCode("""errCode: 11014""".r.unanchored, EntranceErrorConstants.QUEUE_CPU_EXHAUST,
- "队列CPU资源不足"),
- ErrorCode("""errCode: 11015""".r.unanchored, EntranceErrorConstants.QUEUE_MEM_EXHAUST, "队列内存资源不足"),
- ErrorCode("""errCode: 11016""".r.unanchored, EntranceErrorConstants.QUEUE_NUMBER_EXHAUST, "队列实例数超过限制"),
- ErrorCode("""errCode: 11017""".r.unanchored, EntranceErrorConstants.ENGINE_EXHAUST, "超出全局计算引擎实例限制"),
- ErrorCode("""资源不足""".r.unanchored, EntranceErrorConstants.YARN_RESOURCE_EXHAUSTION, "资源不足,启动引擎失败"),
- ErrorCode("""获取Yarn队列信息异常""".r.unanchored, EntranceErrorConstants.QUERY_YARN_ERROR, "获取Yarn队列信息异常," +
- "可能是您设置的yarn队列不存在")
- )
- }
-}
-
-/**
- * errorCodeManager的单例对象,主要是用来生成固定的错误码
- */
-object FixedErrorCodeManager extends FileErrorCodeManager {
-
- override val errorCodeFile: String = ""
-
- override def getErrorCodes: Array[ErrorCode] = getCommonErrorCodes
-}
/**
* this error code is from errorcode server
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/parser/CommonEntranceParser.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/parser/CommonEntranceParser.scala
index bdb70bf..4cb0f62 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/parser/CommonEntranceParser.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/parser/CommonEntranceParser.scala
@@ -122,7 +122,7 @@
private def generateAndVerifyCodeLanguageLabel(runType: String, labels: util.Map[String, Label[_]]): Unit = {
val engineRunTypeLabel = labels.getOrElse(LabelKeyConstant.CODE_TYPE_KEY, null)
if (StringUtils.isBlank(runType) && null == engineRunTypeLabel) {
- val msg = s"You need to specify runType in execution content, such as spark-2.4.3"
+ val msg = s"You need to specify runType in execution content, such as sql"
warn(msg)
throw new EntranceIllegalParamException(EntranceErrorCode.LABEL_PARAMS_INVALID.getErrCode,
EntranceErrorCode.LABEL_PARAMS_INVALID.getDesc + msg)
@@ -196,7 +196,7 @@
labelList.add(runTypeLabel)
labelList.add(userCreatorLabel)
if (jobReq.getParams != null ) {
- val labelMap = jobReq.getParams.getOrDefault(TaskConstant.LABELS, new util.HashMap[String, String]()).asInstanceOf[util.Map[String, Object]]
+ val labelMap = params.getOrDefault(TaskConstant.LABELS, new util.HashMap[String, String]()).asInstanceOf[util.Map[String, Object]]
if (null != labelMap && !labelMap.isEmpty) {
val list: util.List[Label[_]] = labelBuilderFactory.getLabels(labelMap.asInstanceOf[util.Map[String, AnyRef]])
labelList.addAll(list)
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/restful/EntranceRestfulRemote.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/restful/EntranceRestfulRemote.scala
index 10862d7..676fbe3 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/restful/EntranceRestfulRemote.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/restful/EntranceRestfulRemote.scala
@@ -50,8 +50,8 @@
@RequestMapping(value = Array("/entrance/{id}/log"), method = Array(RequestMethod.POST))
def log(@Context req: HttpServletRequest, @PathVariable("id") id: String): Response
- @RequestMapping(value = Array("/entrance/killJobs"), method = Array(RequestMethod.POST))
- def killJobs(@Context req: HttpServletRequest, jsonNode: JsonNode): Response
+ @RequestMapping(value = Array("/entrance/{id}/killJobs"), method = Array(RequestMethod.POST))
+ def killJobs(@Context req: HttpServletRequest, jsonNode: JsonNode, @PathVariable("id") id: String): Response
@RequestMapping(value = Array("/entrance/{id}/kill"), method = Array(RequestMethod.POST))
def kill(@PathVariable("id") id: String, @QueryParam("taskID") taskID:Long): Response
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/scheduler/EntranceGroupFactory.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/scheduler/EntranceGroupFactory.scala
index 19317a8..9af588b 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/scheduler/EntranceGroupFactory.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/scheduler/EntranceGroupFactory.scala
@@ -21,7 +21,7 @@
import com.webank.wedatasphere.linkis.common.conf.{CommonVars, Configuration}
import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
import com.webank.wedatasphere.linkis.entrance.conf.EntranceConfiguration
-import com.webank.wedatasphere.linkis.entrance.exception.EntranceErrorException
+import com.webank.wedatasphere.linkis.entrance.exception.{EntranceErrorCode, EntranceErrorException}
import com.webank.wedatasphere.linkis.entrance.execute.EntranceJob
import com.webank.wedatasphere.linkis.governance.common.protocol.conf.{RequestQueryEngineConfig, ResponseQueryConfig}
import com.webank.wedatasphere.linkis.manager.label.entity.Label
@@ -89,7 +89,13 @@
groupNameToGroups.get(groupName)
}
- override def getGroup(groupName: String): Group = groupNameToGroups.get(groupName)
+ override def getGroup(groupName: String): Group = {
+ val group = groupNameToGroups.get(groupName)
+ if(group == null){
+ throw new EntranceErrorException(EntranceErrorCode.GROUP_NOT_FOUND.getErrCode, s"group not found: ${groupName}")
+ }
+ group
+ }
}
object EntranceGroupFactory {
diff --git a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/utils/JobHistoryHelper.scala b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/utils/JobHistoryHelper.scala
index 7ae4fd6..532030f 100644
--- a/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/utils/JobHistoryHelper.scala
+++ b/linkis-computation-governance/linkis-entrance/src/main/scala/com/webank/wedatasphere/linkis/entrance/utils/JobHistoryHelper.scala
@@ -1,19 +1,23 @@
package com.webank.wedatasphere.linkis.entrance.utils
-import java.util
-
import com.webank.wedatasphere.linkis.common.exception.ErrorException
import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
import com.webank.wedatasphere.linkis.entrance.conf.EntranceConfiguration
import com.webank.wedatasphere.linkis.entrance.exception.JobHistoryFailedException
+import com.webank.wedatasphere.linkis.entrance.execute.EntranceJob
import com.webank.wedatasphere.linkis.governance.common.constant.job.JobRequestConstants
import com.webank.wedatasphere.linkis.governance.common.entity.job.{JobRequest, SubJobDetail, SubJobInfo}
-import com.webank.wedatasphere.linkis.governance.common.entity.task.{RequestPersistTask, RequestQueryTask, RequestUpdateTask, ResponsePersist}
-import com.webank.wedatasphere.linkis.governance.common.protocol.job.{JobDetailReqUpdate, JobReqQuery, JobReqUpdate, JobRespProtocol}
+import com.webank.wedatasphere.linkis.governance.common.protocol.job._
import com.webank.wedatasphere.linkis.protocol.query.cache.{CacheTaskResult, RequestReadCache}
import com.webank.wedatasphere.linkis.rpc.Sender
import com.webank.wedatasphere.linkis.scheduler.queue.SchedulerEventState
+import java.util
+import javax.servlet.http.HttpServletRequest
+import org.apache.commons.lang.StringUtils
+import sun.net.util.IPAddressUtil
+
+import scala.collection.JavaConversions._
object JobHistoryHelper extends Logging{
@@ -35,6 +39,22 @@
else task.getStatus
}
+ def getRequestIpAddr(req: HttpServletRequest ): String = {
+ val addrList = List(Option(req.getHeader("x-forwarded-for")).getOrElse("").split(",")(0),
+ Option(req.getHeader("Proxy-Client-IP")).getOrElse(""),
+ Option(req.getHeader("WL-Proxy-Client-IP")).getOrElse(""),
+ Option(req.getHeader("HTTP_CLIENT_IP")).getOrElse(""),
+ Option(req.getHeader("HTTP_X_FORWARDED_FOR")).getOrElse("")
+ )
+ val afterProxyIp = addrList.find(ip => {StringUtils.isNotEmpty(ip) &&
+ (IPAddressUtil.isIPv4LiteralAddress(ip) || IPAddressUtil.isIPv6LiteralAddress(ip))}).getOrElse("")
+ if(StringUtils.isNotEmpty(afterProxyIp)){
+ afterProxyIp
+ }else{
+ req.getRemoteAddr
+ }
+ }
+
/**
* 对于一个在内存中找不到这个任务的话,可以直接干掉
* @param taskID
@@ -49,11 +69,38 @@
val jobRequest = new JobRequest
jobRequest.setId(taskID)
jobRequest.setStatus(SchedulerEventState.Cancelled.toString)
+ jobRequest.setProgress(EntranceJob.JOB_COMPLETED_PROGRESS.toString)
val jobReqUpdate = JobReqUpdate(jobRequest)
sender.ask(jobReqUpdate)
sender.ask(jobDetailReqUpdate)
}
+ /**
+ * 批量强制kill
+ * @param taskIdList
+ */
+ def forceBatchKill(taskIdList: util.ArrayList[java.lang.Long]):Unit = {
+ val subJobInfoList = new util.ArrayList[SubJobInfo]()
+ val jobReqList = new util.ArrayList[JobRequest]()
+ taskIdList.foreach(taskID => {
+ val subJobInfo = new SubJobInfo
+ val subJobDetail = new SubJobDetail
+ subJobDetail.setId(taskID)
+ subJobDetail.setStatus(SchedulerEventState.Cancelled.toString)
+ subJobInfo.setSubJobDetail(subJobDetail)
+ subJobInfoList.add(subJobInfo)
+ val jobRequest = new JobRequest
+ jobRequest.setId(taskID)
+ jobRequest.setStatus(SchedulerEventState.Cancelled.toString)
+ jobRequest.setProgress(EntranceJob.JOB_COMPLETED_PROGRESS.toString)
+ jobReqList.add(jobRequest)
+ })
+ val jobDetailReqBatchUpdate = JobDetailReqBatchUpdate(subJobInfoList)
+ val jobReqBatchUpdate = JobReqBatchUpdate(jobReqList)
+ sender.ask(jobDetailReqBatchUpdate)
+ sender.ask(jobReqBatchUpdate)
+ }
+
private def getTaskByTaskID(taskID:Long): JobRequest = {
val jobRequest = new JobRequest
jobRequest.setId(taskID)
diff --git a/linkis-computation-governance/linkis-jdbc-driver/pom.xml b/linkis-computation-governance/linkis-jdbc-driver/pom.xml
index d0ad520..2ed089b 100644
--- a/linkis-computation-governance/linkis-jdbc-driver/pom.xml
+++ b/linkis-computation-governance/linkis-jdbc-driver/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<!-- <relativePath>../../pom.xml</relativePath>-->
</parent>
<artifactId>linkis-jdbc-driver</artifactId>
diff --git a/linkis-computation-governance/linkis-manager/label-common/pom.xml b/linkis-computation-governance/linkis-manager/label-common/pom.xml
index 6a3bb02..633c710 100644
--- a/linkis-computation-governance/linkis-manager/label-common/pom.xml
+++ b/linkis-computation-governance/linkis-manager/label-common/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/conf/LabelCommonConfig.java b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/conf/LabelCommonConfig.java
index c567469..9ecb18c 100644
--- a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/conf/LabelCommonConfig.java
+++ b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/conf/LabelCommonConfig.java
@@ -20,7 +20,7 @@
public class LabelCommonConfig {
- public final static CommonVars<String> LABEL_FACTORY_CLASS = CommonVars.apply("wds.linkis.label.label.com.webank.wedatasphere.linkis.entrance.factory.clazz", "");
+ public final static CommonVars<String> LABEL_FACTORY_CLASS = CommonVars.apply("wds.linkis.label.factory.clazz", "");
public final static CommonVars<Double> LABEL_SCORER_BASE_CORE = CommonVars.apply("wds.linkis.label.scorer.base.core", 1.0d);
@@ -30,7 +30,7 @@
public final static CommonVars<String> SPARK_ENGINE_VERSION = CommonVars.apply("wds.linkis.spark.engine.version", "2.4.3");
- public final static CommonVars<String> HIVE_ENGINE_VERSION = CommonVars.apply("wds.linkis.hive.engine.version", "1.2.1");
+ public final static CommonVars<String> HIVE_ENGINE_VERSION = CommonVars.apply("wds.linkis.hive.engine.version", "2.3.3");
public final static CommonVars<String> PYTHON_ENGINE_VERSION = CommonVars.apply("wds.linkis.python.engine.version", "python2");
@@ -44,5 +44,10 @@
public final static CommonVars<String> SHELL_ENGINE_VERSION = CommonVars.apply("wds.linkis.shell.engine.version", "1");
- public final static CommonVars<String> APPCONN_ENGINE_VERSION = CommonVars.apply("wds.linkis.appconn.engine.version", "v1");
+ public final static CommonVars<String> APPCONN_ENGINE_VERSION = CommonVars.apply("wds.linkis.appconn.engine.version", "1");
+
+ public final static CommonVars<String> FLINK_ENGINE_VERSION = CommonVars.apply("wds.linkis.flink.engine.version", "1.11.1");
+
+ public final static CommonVars<String> PERMANENT_LABEL = CommonVars.apply("wds.linkis.am.permanent.label", "tenant");
}
+
diff --git a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/constant/LabelKeyConstant.java b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/constant/LabelKeyConstant.java
index cf030c0..6e63316 100644
--- a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/constant/LabelKeyConstant.java
+++ b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/constant/LabelKeyConstant.java
@@ -46,7 +46,6 @@
public static final String BIND_ENGINE_KEY = "bindEngine";
-
public static final String JOB_QUEUING_TIMEOUT_KEY = "jobQueuingTimeout";
public static final String JOB_RUNNING_TIMEOUT_KEY = "jobRunningTimeout";
@@ -55,6 +54,7 @@
public static final String LOAD_BALANCE_KEY = "loadBalance";
-
public static final String REUSE_EXCLUSION_KEY = "reuseExclusion";
+
+ public static final String TENANT_KEY = "tenant";
}
diff --git a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/CombinedLabel.java b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/CombinedLabel.java
index 82dbcb8..08b2238 100644
--- a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/CombinedLabel.java
+++ b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/CombinedLabel.java
@@ -18,10 +18,13 @@
import java.util.List;
-public interface CombinedLabel extends Label<List<Label<?>>> {
+public interface CombinedLabel extends Label<List<Label<?>>>, ResourceLabel{
default List<Label<?>> getLabels() {
return getValue();
}
+ @Override
+ boolean equals(Object obj);
+
}
diff --git a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/CombinedLabelImpl.java b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/CombinedLabelImpl.java
index 46b7025..7e252b1 100644
--- a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/CombinedLabelImpl.java
+++ b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/CombinedLabelImpl.java
@@ -80,6 +80,16 @@
}
@Override
+ public boolean equals(Object obj) {
+ if(obj != null && obj instanceof CombinedLabel){
+ return getStringValue().equals(((Label) obj).getStringValue());
+ }else{
+ return false;
+ }
+ }
+
+
+ @Override
public String toString() {
return "CombinedLabelImpl{" +
"key=" + getLabelKey() +
diff --git a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/JobLabel.scala b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/JobLabel.scala
index 7cac687..1e0c6ee 100644
--- a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/JobLabel.scala
+++ b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/JobLabel.scala
@@ -19,6 +19,7 @@
package com.webank.wedatasphere.linkis.manager.label.entity
import java.util
+import com.webank.wedatasphere.linkis.manager.label.entity.annon.ValueSerialNum
class JobLabel extends GenericLabel {
@@ -27,6 +28,7 @@
def getJobId: String = Option(getValue).map(_.get("jobId")).orNull
+ @ValueSerialNum(0)
def setJobId(jobId: String): Unit = {
if (null == getValue) setValue(new util.HashMap[String, String])
getValue.put("jobId", jobId)
diff --git a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/ResourceLabel.java b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/ResourceLabel.java
new file mode 100644
index 0000000..b732d11
--- /dev/null
+++ b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/ResourceLabel.java
@@ -0,0 +1,4 @@
+package com.webank.wedatasphere.linkis.manager.label.entity;
+
+public interface ResourceLabel {
+}
diff --git a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/TenantLabel.java b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/TenantLabel.java
new file mode 100644
index 0000000..dc37b60
--- /dev/null
+++ b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/TenantLabel.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.linkis.manager.label.entity;
+
+import com.webank.wedatasphere.linkis.manager.label.constant.LabelConstant;
+import com.webank.wedatasphere.linkis.manager.label.constant.LabelKeyConstant;
+import com.webank.wedatasphere.linkis.manager.label.entity.annon.ValueSerialNum;
+import com.webank.wedatasphere.linkis.manager.label.exception.LabelErrorException;
+import org.apache.commons.lang.StringUtils;
+
+import java.util.HashMap;
+
+public class TenantLabel extends GenericLabel implements EMNodeLabel, EngineNodeLabel, UserModifiable {
+
+ public TenantLabel() {
+ setLabelKey(LabelKeyConstant.TENANT_KEY);
+ }
+
+ @ValueSerialNum(0)
+ public void setTenant(String tenant){
+ if (getValue() == null) {
+ setValue(new HashMap<>());
+ }
+ getValue().put(getLabelKey(), tenant);
+ }
+
+ public String getTenant(){
+ if(getValue() == null){
+ return null;
+ }
+ return getValue().get(getLabelKey());
+ }
+
+ @Override
+ public Feature getFeature() {
+ return Feature.CORE;
+ }
+
+ @Override
+ public Boolean getModifiable() {
+ return true;
+ }
+
+ @Override
+ public void valueCheck(String stringValue) throws LabelErrorException {
+ if(!StringUtils.isEmpty(stringValue)){
+ if(stringValue.split(SerializableLabel.VALUE_SEPARATOR).length != 1){
+ throw new LabelErrorException(LabelConstant.LABEL_BUILDER_ERROR_CODE,
+                "标签tenant的值设置错误,只能设置1个值,并且不能使用符号" + VALUE_SEPARATOR);
+ }
+ }
+ }
+}
diff --git a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/em/EMInstanceLabel.java b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/em/EMInstanceLabel.java
index 9fee280..e5756c4 100644
--- a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/em/EMInstanceLabel.java
+++ b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/em/EMInstanceLabel.java
@@ -22,11 +22,12 @@
import com.webank.wedatasphere.linkis.manager.label.entity.EngineNodeLabel;
import com.webank.wedatasphere.linkis.manager.label.entity.GenericLabel;
import com.webank.wedatasphere.linkis.manager.label.entity.annon.ValueSerialNum;
+import com.webank.wedatasphere.linkis.manager.label.entity.ResourceLabel;
import com.webank.wedatasphere.linkis.manager.label.entity.node.NodeInstanceLabel;
import java.util.HashMap;
-public class EMInstanceLabel extends GenericLabel implements NodeInstanceLabel, EMNodeLabel, EngineNodeLabel {
+public class EMInstanceLabel extends GenericLabel implements NodeInstanceLabel, EMNodeLabel, EngineNodeLabel, ResourceLabel {
public EMInstanceLabel() {
diff --git a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/CodeLanguageLabel.java b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/CodeLanguageLabel.java
index 147a430..eb06597 100644
--- a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/CodeLanguageLabel.java
+++ b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/CodeLanguageLabel.java
@@ -24,7 +24,7 @@
import java.util.HashMap;
-public class CodeLanguageLabel extends GenericLabel implements EngineNodeLabel {
+public class CodeLanguageLabel extends GenericLabel {
public CodeLanguageLabel() {
setLabelKey(LabelKeyConstant.CODE_TYPE_KEY);
diff --git a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/ConcurrentEngineConnLabel.java b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/ConcurrentEngineConnLabel.java
index d40685a..832c772 100644
--- a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/ConcurrentEngineConnLabel.java
+++ b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/ConcurrentEngineConnLabel.java
@@ -30,11 +30,11 @@
}
@ValueSerialNum(0)
- public void setParallelism(int parallelism) {
+ public void setParallelism(String parallelism) {
if (null == getValue()) {
setValue(new HashMap<>());
}
- getValue().put("parallelism", Integer.toString(parallelism));
+ getValue().put("parallelism", parallelism);
}
public String getParallelism() {
diff --git a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/EngineConnModeLabel.scala b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/EngineConnModeLabel.scala
index e693867..3378336 100644
--- a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/EngineConnModeLabel.scala
+++ b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/EngineConnModeLabel.scala
@@ -1,14 +1,20 @@
package com.webank.wedatasphere.linkis.manager.label.entity.engine
import java.util
+import com.webank.wedatasphere.linkis.manager.label.entity.{EngineNodeLabel, Feature, GenericLabel}
+import com.webank.wedatasphere.linkis.manager.label.entity.annon.ValueSerialNum
+
import com.webank.wedatasphere.linkis.manager.label.entity.GenericLabel
-class EngineConnModeLabel extends GenericLabel {
+class EngineConnModeLabel extends GenericLabel with EngineNodeLabel {
setLabelKey("engineConnMode")
+ override def getFeature = Feature.CORE
+
+ @ValueSerialNum(0)
def setEngineConnMode(engineConnMode: String): Unit = {
if (null == getValue) setValue(new util.HashMap[String, String])
getValue.put("engineConnMode", engineConnMode)
diff --git a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/EngineInstanceLabel.java b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/EngineInstanceLabel.java
index 8afa896..66bb21e 100644
--- a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/EngineInstanceLabel.java
+++ b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/EngineInstanceLabel.java
@@ -20,12 +20,13 @@
import com.webank.wedatasphere.linkis.manager.label.constant.LabelKeyConstant;
import com.webank.wedatasphere.linkis.manager.label.entity.EngineNodeLabel;
import com.webank.wedatasphere.linkis.manager.label.entity.GenericLabel;
+import com.webank.wedatasphere.linkis.manager.label.entity.ResourceLabel;
import com.webank.wedatasphere.linkis.manager.label.entity.annon.ValueSerialNum;
import com.webank.wedatasphere.linkis.manager.label.entity.node.NodeInstanceLabel;
import java.util.HashMap;
-public class EngineInstanceLabel extends GenericLabel implements NodeInstanceLabel, EngineNodeLabel {
+public class EngineInstanceLabel extends GenericLabel implements NodeInstanceLabel, EngineNodeLabel, ResourceLabel {
public EngineInstanceLabel() {
setLabelKey(LabelKeyConstant.ENGINE_INSTANCE_KEY);
diff --git a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/EngineType.scala b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/EngineType.scala
index b60a90d..6f854d0 100644
--- a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/EngineType.scala
+++ b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/EngineType.scala
@@ -38,6 +38,8 @@
val IO_ENGINE_HDFS = Value("io_hdfs")
+ val FPS = Value("fps")
+
val PIPELINE = Value("pipeline")
val ES = Value("es")
diff --git a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/EngineTypeLabel.java b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/EngineTypeLabel.java
index c90c819..f7239c4 100644
--- a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/EngineTypeLabel.java
+++ b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/EngineTypeLabel.java
@@ -17,10 +17,7 @@
package com.webank.wedatasphere.linkis.manager.label.entity.engine;
import com.webank.wedatasphere.linkis.manager.label.constant.LabelKeyConstant;
-import com.webank.wedatasphere.linkis.manager.label.entity.EMNodeLabel;
-import com.webank.wedatasphere.linkis.manager.label.entity.EngineNodeLabel;
-import com.webank.wedatasphere.linkis.manager.label.entity.Feature;
-import com.webank.wedatasphere.linkis.manager.label.entity.GenericLabel;
+import com.webank.wedatasphere.linkis.manager.label.entity.*;
import com.webank.wedatasphere.linkis.manager.label.entity.annon.ValueSerialNum;
import org.apache.commons.lang.StringUtils;
diff --git a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/RunType.scala b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/RunType.scala
index 39f88d7..451c659 100644
--- a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/RunType.scala
+++ b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/entity/engine/RunType.scala
@@ -23,6 +23,7 @@
val HIVE = Value("hql")
val SCALA = Value("scala")
val PYTHON = Value("python")
+ val JAVA = Value("java")
val PYSPARK = Value("py")
val R = Value("r")
val STORAGE = Value("out")
diff --git a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/utils/EngineTypeLabelCreator.java b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/utils/EngineTypeLabelCreator.java
index ae9ed94..49df9d9 100644
--- a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/utils/EngineTypeLabelCreator.java
+++ b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/utils/EngineTypeLabelCreator.java
@@ -46,6 +46,7 @@
defaultVersion.put(EngineType.PIPELINE().toString(), LabelCommonConfig.PIPELINE_ENGINE_VERSION.getValue());
defaultVersion.put(EngineType.SHELL().toString(), LabelCommonConfig.SHELL_ENGINE_VERSION.getValue());
defaultVersion.put(EngineType.APPCONN().toString(), LabelCommonConfig.APPCONN_ENGINE_VERSION.getValue());
+ defaultVersion.put(EngineType.FLINK().toString(), LabelCommonConfig.FLINK_ENGINE_VERSION.getValue());
defaultVersion.put("*", "*");
}
}
diff --git a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/utils/LabelUtils.java b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/utils/LabelUtils.java
index 55a41a4..bec6492 100644
--- a/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/utils/LabelUtils.java
+++ b/linkis-computation-governance/linkis-manager/label-common/src/main/java/com/webank/wedatasphere/linkis/manager/label/utils/LabelUtils.java
@@ -276,9 +276,10 @@
for (Label<?> label : labelList) {
if (!labelMap.containsKey(label.getLabelKey())) {
labelMap.put(label.getLabelKey(), label.getStringValue());
- } else {
- throw new LabelRuntimeException(LabelConstant.LABEL_UTIL_CONVERT_ERROR_CODE, "Got more than one " + label.getLabelKey() + " label, some will be dropped, use labelsToPairList instead.");
}
+ /*else {
+ throw new LabelRuntimeException(LabelConstant.LABEL_UTIL_CONVERT_ERROR_CODE, "Got more than one " + label.getLabelKey() + " label, some will be dropped, use labelsToPairList instead.");
+ }*/
}
return labelMap;
}
diff --git a/linkis-computation-governance/linkis-manager/label-manager/pom.xml b/linkis-computation-governance/linkis-manager/label-manager/pom.xml
index 3364e44..71e9609 100644
--- a/linkis-computation-governance/linkis-manager/label-manager/pom.xml
+++ b/linkis-computation-governance/linkis-manager/label-manager/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<!--<relativePath>../pom.xml</relativePath>-->
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/LabelManagerUtils.scala b/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/LabelManagerUtils.scala
new file mode 100644
index 0000000..d424b56
--- /dev/null
+++ b/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/LabelManagerUtils.scala
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2019 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.linkis.manager.label
+
+import com.webank.wedatasphere.linkis.manager.common.entity.persistence.PersistenceLabel
+import com.webank.wedatasphere.linkis.manager.label.builder.factory.LabelBuilderFactoryContext
+import com.webank.wedatasphere.linkis.manager.label.entity.Label
+
+object LabelManagerUtils {
+
+ val labelFactory = LabelBuilderFactoryContext.getLabelBuilderFactory
+
+ def convertPersistenceLabel(label: Label[_]): PersistenceLabel = {
+ label match {
+ case persistenceLabel: PersistenceLabel =>
+ persistenceLabel
+ case _ =>
+ labelFactory.convertLabel[PersistenceLabel](label, classOf[PersistenceLabel])
+ }
+ }
+}
diff --git a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/common/resource/UserTimeoutNodeResource.scala b/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/conf/LabelManagerConf.scala
similarity index 63%
copy from linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/common/resource/UserTimeoutNodeResource.scala
copy to linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/conf/LabelManagerConf.scala
index 66fd946..cb7feb4 100644
--- a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/common/resource/UserTimeoutNodeResource.scala
+++ b/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/conf/LabelManagerConf.scala
@@ -1,12 +1,9 @@
/*
* Copyright 2019 WeBank
- *
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
- *
* http://www.apache.org/licenses/LICENSE-2.0
- *
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -14,17 +11,13 @@
* limitations under the License.
*/
-package com.webank.wedatasphere.linkis.manager.engineplugin.common.resource
+package com.webank.wedatasphere.linkis.manager.label.conf
+
+import com.webank.wedatasphere.linkis.common.conf.CommonVars
+
+object LabelManagerConf {
+ val LONG_LIVED_LABEL = CommonVars("wds.linkis.label.node.long.lived.label.keys", "tenant").getValue
-/*
-/**
- */
-class UserTimeoutNodeResource extends UserNodeResource with TimeoutNodeResource {
- private var timeout: Long = _
-
- override def getTimeout: Long = timeout
-
- override def setTimeout(timeout: Long): Unit = this.timeout = timeout
-}*/
+}
diff --git a/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/service/NodeLabelService.scala b/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/service/NodeLabelService.scala
index 6d718b6..a787081 100644
--- a/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/service/NodeLabelService.scala
+++ b/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/service/NodeLabelService.scala
@@ -64,6 +64,8 @@
def getNodesByLabels(labels: util.List[Label[_]]): util.List[ServiceInstance]
+ def removeLabelsFromNodeWithoutPermanent(instance: ServiceInstance, permanentLabel: Array[String])
+
def getNodesByLabel(label: Label[_]): util.List[ServiceInstance]
diff --git a/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/service/impl/DefaultNodeLabelRemoveService.scala b/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/service/impl/DefaultNodeLabelRemoveService.scala
index b329d66..07b5d24 100644
--- a/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/service/impl/DefaultNodeLabelRemoveService.scala
+++ b/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/service/impl/DefaultNodeLabelRemoveService.scala
@@ -25,6 +25,7 @@
import com.webank.wedatasphere.linkis.manager.persistence.LabelManagerPersistence
import com.webank.wedatasphere.linkis.message.annotation.Receiver
import com.webank.wedatasphere.linkis.protocol.label.NodeLabelRemoveRequest
+import com.webank.wedatasphere.linkis.manager.label.conf.LabelCommonConfig
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
@@ -38,22 +39,24 @@
@Autowired
private var labelPersistence: LabelManagerPersistence = _
+ private val labelFactory = LabelBuilderFactoryContext.getLabelBuilderFactory
+
@Receiver
override def removeNodeLabel(nodeLabelRemoveRequest: NodeLabelRemoveRequest): Unit = {
info(s"Start to remove labels from node ${nodeLabelRemoveRequest.getServiceInstance}")
- nodeLabelService.removeLabelsFromNode(nodeLabelRemoveRequest.getServiceInstance)
+ val permanentLabelKey = LabelCommonConfig.PERMANENT_LABEL.getValue.split(",")
+ nodeLabelService.removeLabelsFromNodeWithoutPermanent(nodeLabelRemoveRequest.getServiceInstance,permanentLabelKey)
val persistenceLabel = if (nodeLabelRemoveRequest.isEngine) {
- val engineLabel = LabelBuilderFactoryContext.getLabelBuilderFactory.createLabel(classOf[EngineInstanceLabel])
+ val engineLabel = labelFactory.createLabel(classOf[EngineInstanceLabel])
engineLabel.setInstance(nodeLabelRemoveRequest.getServiceInstance.getInstance)
engineLabel.setServiceName(nodeLabelRemoveRequest.getServiceInstance.getApplicationName)
- LabelBuilderFactoryContext.getLabelBuilderFactory.convertLabel(engineLabel, classOf[PersistenceLabel])
+ labelFactory.convertLabel(engineLabel, classOf[PersistenceLabel])
} else {
-
- val eMInstanceLabel = LabelBuilderFactoryContext.getLabelBuilderFactory.createLabel(classOf[EMInstanceLabel])
+ val eMInstanceLabel = labelFactory.createLabel(classOf[EMInstanceLabel])
eMInstanceLabel.setServiceName(nodeLabelRemoveRequest.getServiceInstance.getApplicationName)
eMInstanceLabel.setInstance(nodeLabelRemoveRequest.getServiceInstance.getInstance)
- LabelBuilderFactoryContext.getLabelBuilderFactory.convertLabel(eMInstanceLabel, classOf[PersistenceLabel])
+ labelFactory.convertLabel(eMInstanceLabel, classOf[PersistenceLabel])
}
labelPersistence.removeLabel(persistenceLabel)
diff --git a/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/service/impl/DefaultNodeLabelService.scala b/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/service/impl/DefaultNodeLabelService.scala
index 3a057e4..f0bebfe 100644
--- a/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/service/impl/DefaultNodeLabelService.scala
+++ b/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/service/impl/DefaultNodeLabelService.scala
@@ -22,7 +22,10 @@
import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
import com.webank.wedatasphere.linkis.manager.common.entity.node.ScoreServiceInstance
import com.webank.wedatasphere.linkis.manager.common.entity.persistence.PersistenceLabel
+import com.webank.wedatasphere.linkis.manager.common.utils.ManagerUtils
+import com.webank.wedatasphere.linkis.manager.label.LabelManagerUtils
import com.webank.wedatasphere.linkis.manager.label.builder.factory.LabelBuilderFactoryContext
+import com.webank.wedatasphere.linkis.manager.label.conf.LabelManagerConf
import com.webank.wedatasphere.linkis.manager.label.entity.Label.ValueRelation
import com.webank.wedatasphere.linkis.manager.label.entity.{Feature, Label, UserModifiable}
import com.webank.wedatasphere.linkis.manager.label.score.NodeLabelScorer
@@ -49,13 +52,14 @@
@Autowired
private var nodeLabelScorer: NodeLabelScorer = _
+
/**
- * Attach labels to node instance
- * TODO 该方法需要优化,应该batch插入
- *
- * @param instance node instance
- * @param labels label list
- */
+ * Attach labels to node instance
+ * TODO 该方法需要优化,应该batch插入
+ *
+ * @param instance node instance
+ * @param labels label list
+ */
@Transactional(rollbackFor = Array(classOf[Exception]))
override def addLabelsToNode( instance: ServiceInstance, labels: util.List[Label[_]]): Unit = {
if (null != labels && !labels.isEmpty) labels.foreach(addLabelToNode(instance, _))
@@ -63,43 +67,43 @@
@Transactional(rollbackFor = Array(classOf[Exception]))
override def addLabelToNode(instance: ServiceInstance, label: Label[_]): Unit = {
- // TODO: 上层应该有个初始化清理表格的方法
- val persistenceLabel = labelFactory.convertLabel(label, classOf[PersistenceLabel])
+ val persistenceLabel = LabelManagerUtils.convertPersistenceLabel(label)
//Try to add
- tryToAddLabel(persistenceLabel);
- //2.查询出当前label的id
- val dbLabel = labelManagerPersistence.getLabelByKeyValue(persistenceLabel.getLabelKey, persistenceLabel.getStringValue)
- //3.根据instance 找出当前关联当前isntance的所有labels,看下有没和当前id重复的
- //2020/09/14 如果已经存在关联,则不关联
- //TODO 对于这样的记录查询,插入时有并发问题,可以考虑提供getLabelByServiceInstanceAndId方法,配合for update关键字,使用间隙锁
- val serviceRelationLabels = labelManagerPersistence.getLabelByServiceInstance(instance)
- if (!serviceRelationLabels.exists(_.getId.equals(dbLabel.getId))){
- //5.插入新的relation 需要抛出duplicateKey异常,回滚
- labelManagerPersistence.addLabelToNode(instance, util.Arrays.asList(dbLabel.getId))
+ val labelId = tryToAddLabel(persistenceLabel)
+ if (labelId > 0 ) {
+ val serviceRelationLabels = labelManagerPersistence.getLabelByServiceInstance(instance)
+ if (!serviceRelationLabels.exists(_.getId.equals(labelId))){
+ labelManagerPersistence.addLabelToNode(instance, util.Arrays.asList(labelId))
+ }
}
+
}
@Transactional(rollbackFor = Array(classOf[Exception]))
override def updateLabelToNode(instance: ServiceInstance, label: Label[_]): Unit = {
- val persistenceLabel = this.labelFactory.convertLabel(label, classOf[PersistenceLabel])
+ val persistenceLabel = LabelManagerUtils.convertPersistenceLabel(label)
//Try to add
- tryToAddLabel(persistenceLabel)
- val dbLabel = labelManagerPersistence.getLabelByKeyValue(persistenceLabel.getLabelKey, persistenceLabel.getStringValue)
+ val labelId = tryToAddLabel(persistenceLabel)
+ if (labelId <=0 ) return
+ //val dbLabel = labelManagerPersistence.getLabelByKeyValue(persistenceLabel.getLabelKey, persistenceLabel.getStringValue)
//TODO: add method: getLabelsByServiceInstanceAndKey(instance, labelKey)
val nodeLabels = this.labelManagerPersistence.getLabelByServiceInstance(instance)
var needUpdate = true
+ val needRemoveIds = new java.util.ArrayList[Integer]()
nodeLabels.filter(_.getLabelKey.equals(label.getLabelKey)).foreach( nodeLabel =>{
- if(nodeLabel.getId.equals(dbLabel.getId)){
+ if(nodeLabel.getId.equals(labelId)){
needUpdate = false
}else{
- //TODO: add batch method: removeLabels(Ids)
- this.labelManagerPersistence.removeLabel(nodeLabel.getId)
+ needRemoveIds.add(nodeLabel.getId)
}
})
+ if (null != needRemoveIds && needRemoveIds.nonEmpty) {
+ this.labelManagerPersistence.removeNodeLabels(instance, needRemoveIds)
+ }
if (needUpdate) {
- val labelId = new util.ArrayList[Integer]()
- labelId.add(dbLabel.getId)
- this.labelManagerPersistence.addLabelToNode(instance, labelId)
+ val labelIds = new util.ArrayList[Integer]()
+ labelIds.add(labelId)
+ this.labelManagerPersistence.addLabelToNode(instance, labelIds)
}
}
@@ -122,7 +126,7 @@
labels.foreach(label => {
if(modifiableKeyList.contains(label.getLabelKey) && willBeUpdate.contains(label.getLabelKey)){
nodeLabels.filter(_.getLabelKey.equals(label.getLabelKey)).foreach(oldLabel => {
- val persistenceLabel = labelFactory.convertLabel(label, classOf[PersistenceLabel])
+ val persistenceLabel = LabelManagerUtils.convertPersistenceLabel(label)
persistenceLabel.setId(oldLabel.getId)
labelManagerPersistence.updateLabel(persistenceLabel.getId, persistenceLabel)
})
@@ -131,13 +135,14 @@
}
if(!CollectionUtils.isEmpty(willBeAdd)) {
labels.filter(label => willBeAdd.contains(label.getLabelKey)).foreach(label => {
- if(modifiableKeyList.contains(label.getLabelKey)){
- val persistenceLabel = labelFactory.convertLabel(label, classOf[PersistenceLabel])
- tryToAddLabel(persistenceLabel)
- val dbLabel = labelManagerPersistence.getLabelByKeyValue(persistenceLabel.getLabelKey, persistenceLabel.getStringValue)
- val labelId = new util.ArrayList[Integer]()
- labelId.add(dbLabel.getId)
- labelManagerPersistence.addLabelToNode(instance, labelId)
+ if(modifiableKeyList.contains(label.getLabelKey)){
+ val persistenceLabel = LabelManagerUtils.convertPersistenceLabel(label)
+ val labelId = tryToAddLabel(persistenceLabel)
+ if (labelId > 0) {
+ val labelIds = new util.ArrayList[Integer]()
+ labelIds.add(labelId)
+ labelManagerPersistence.addLabelToNode(instance, labelIds)
+ }
}
})
}
@@ -157,7 +162,16 @@
@Transactional(rollbackFor = Array(classOf[Exception]))
override def removeLabelsFromNode(instance: ServiceInstance): Unit = {
- labelManagerPersistence.removeNodeLabels(instance, labelManagerPersistence.getLabelByServiceInstance(instance).map(_.getId))
+ val removeLabels = labelManagerPersistence.getLabelByServiceInstance(instance).filter(label => ! LabelManagerConf.LONG_LIVED_LABEL.contains( label.getLabelKey))
+ labelManagerPersistence.removeNodeLabels(instance, removeLabels.map(_.getId))
+ }
+
+ @Transactional(rollbackFor = Array(classOf[Exception]))
+ override def removeLabelsFromNodeWithoutPermanent(instance: ServiceInstance, permanentLabel: Array[String] = Array()): Unit = {
+ val labels = labelManagerPersistence.getLabelByServiceInstance(instance)
+ val lowerCasePermanentLabels = permanentLabel.map(_.toLowerCase())
+ val withoutPermanentLabel = labels.filterNot(label => lowerCasePermanentLabels.contains(label.getLabelKey.toLowerCase)).map(_.getId)
+ labelManagerPersistence.removeNodeLabels(instance, withoutPermanentLabel)
}
/**
@@ -171,7 +185,7 @@
}
override def getNodesByLabel(label: Label[_]): util.List[ServiceInstance] = {
- val persistenceLabel = labelFactory.convertLabel(label, classOf[PersistenceLabel])
+ val persistenceLabel = LabelManagerUtils.convertPersistenceLabel(label)
labelManagerPersistence.getNodeByLabelKeyValue(persistenceLabel.getLabelKey,persistenceLabel.getStringValue).distinct
}
@@ -183,46 +197,51 @@
}
/**
- * Get scored node instances
- *
- * @param labels searchableLabel or other normal labels
- * @return
- */
+ * Get scored node instances
+ *
+ * @param labels searchableLabel or other normal labels
+ * @return
+ */
override def getScoredNodesByLabels(labels: util.List[Label[_]]): util.List[ScoreServiceInstance] = {
getScoredNodeMapsByLabels(labels).map(_._1).toList
}
+
 + /**
 + * 1. Filter out the CORE-feature labels from the input as the necessary labels
 + * 2. Convert all input labels to persistence labels and delegate to the scoring overload
 + * @param labels input label list (searchable or normal labels); may be null or empty
 + * @return map of scored service instances to their labels (empty map when no labels given)
 + */
override def getScoredNodeMapsByLabels(labels: util.List[Label[_]]): util.Map[ScoreServiceInstance, util.List[Label[_]]] = {
//Try to convert the label list to key value list
- val kvList = labels.map( label => Option(labelFactory.convertLabel(label, classOf[PersistenceLabel])))
- .filter(option => option.isDefined && Option(option.get.getValue).isDefined).map(_.get.getValue).toList
- if (kvList.nonEmpty) {
+ if (null != labels && labels.nonEmpty) {
//Get the persistence labels by kvList
- val persistenceLabels = labelManagerPersistence.getLabelsByValueList(kvList, ValueRelation.ALL)
-
- if (null != persistenceLabels && persistenceLabels.nonEmpty) {
- val labelsFromDB = persistenceLabels.map(label => labelFactory.createLabel(label.getLabelKey, label.getValue).asInstanceOf[Label[_]])
- val requireLabels = labels.filter(_.getFeature == Feature.CORE)
- val oldSize = requireLabels.size
- val newSize = requireLabels.count(label => labelsFromDB.exists { dbLabel =>
- dbLabel.getLabelKey.equals(label.getLabelKey) && label.getStringValue.equalsIgnoreCase(dbLabel.getStringValue)
- })
- if (oldSize != newSize) return new util.HashMap[ScoreServiceInstance, util.List[Label[_]]]()
- //Extra the necessary labels whose feature equals Feature.CORE or Feature.SUITABLE
- val necessaryLabels = persistenceLabels.filter(label => requireLabels.exists(_.getLabelKey.equalsIgnoreCase(label.getLabelKey))).toList
- return getScoredNodeMapsByLabels(persistenceLabels, necessaryLabels)
- }
-
+ val requireLabels = labels.filter(_.getFeature == Feature.CORE)
+ //Extra the necessary labels whose feature equals Feature.CORE or Feature.SUITABLE
+ val necessaryLabels = requireLabels.map(LabelManagerUtils.convertPersistenceLabel)
+ val inputLabels = labels.map(LabelManagerUtils.convertPersistenceLabel)
+ return getScoredNodeMapsByLabels(inputLabels, necessaryLabels)
}
new util.HashMap[ScoreServiceInstance, util.List[Label[_]]]()
}
 + /**
 + * 1. Get the in-degree relations (label -> nodes) for the necessary (or all) labels
 + * 2. Collect the instances associated with those labels
 + * 3. Get every label of each instance (out-degree relations, node -> labels)
 + * 4. Drop nodes whose CORE label keys do not exactly match the necessary label keys, then score
 + * @param labels the persistence labels to match against
 + * @param necessaryLabels the CORE-feature labels that every candidate node must carry
 + * @return map of scored service instances to their labels
 + */
private def getScoredNodeMapsByLabels(labels: util.List[PersistenceLabel],
necessaryLabels: util.List[PersistenceLabel]): util.Map[ScoreServiceInstance, util.List[Label[_]]] = {
- //Get the in-degree relations ( Label -> Node )
+ //Get the in-degree relations ( Label -> Nodes )
val inNodeDegree = labelManagerPersistence.getNodeRelationsByLabels(if (necessaryLabels.nonEmpty) necessaryLabels else labels)
if (inNodeDegree.isEmpty) {
return new util.HashMap[ScoreServiceInstance, util.List[Label[_]]]()
}
+ // serviceInstance --> labels
val instanceLabels = new mutable.HashMap[ServiceInstance, ArrayBuffer[Label[_]]]()
inNodeDegree.foreach { keyValue =>
keyValue._2.foreach { instance =>
@@ -233,39 +252,66 @@
labelList.get.add(keyValue._1)
}
}
+ // getAll instances
val instances = if (necessaryLabels.nonEmpty) {
//Cut the in-degree relations, drop inconsistent nodes
instanceLabels.filter(entry => entry._2.size >= necessaryLabels.size).keys
} else {
instanceLabels.keys
}
+
//Get the out-degree relations ( Node -> Label )
val outNodeDegree = labelManagerPersistence.getLabelRelationsByServiceInstance(instances.toList)
//outNodeDegree cannot be empty
if (outNodeDegree.nonEmpty) {
+ val necessaryLabelKeys = if (null == necessaryLabels || necessaryLabels.isEmpty) new mutable.HashSet[String]() else {
+ necessaryLabels.map(_.getLabelKey).toSet
+ }
//Rebuild in-degree relations
- inNodeDegree.clear()
- outNodeDegree.foreach{
- case(node, labels) =>
- labels.foreach( label => {
- if (!inNodeDegree.contains(label)) {
- val inNodes = new util.ArrayList[ServiceInstance]()
- inNodeDegree.put(label, inNodes)
- }
- val inNodes = inNodeDegree.get(label)
- inNodes.add(node)
- })
- }
- return nodeLabelScorer.calculate( inNodeDegree, outNodeDegree, labels).asInstanceOf[util.Map[ScoreServiceInstance, util.List[Label[_]]]]
+ inNodeDegree.clear()
+ val removeNodes = new ArrayBuffer[ServiceInstance]()
+ outNodeDegree.foreach{
+ case(node, iLabels) =>
+ //The core tag must be exactly the same
+ if (null != necessaryLabels ) {
+ val coreLabelKeys = iLabels.map(ManagerUtils.persistenceLabelToRealLabel).filter(_.getFeature == Feature.CORE).map(_.getLabelKey).toSet
+ if (necessaryLabelKeys.containsAll(coreLabelKeys) && coreLabelKeys.size == necessaryLabelKeys.size) {
+ iLabels.foreach( label => {
+ if (!inNodeDegree.contains(label)) {
+ val inNodes = new util.ArrayList[ServiceInstance]()
+ inNodeDegree.put(label, inNodes)
+ }
+ val inNodes = inNodeDegree.get(label)
+ inNodes.add(node)
+ })
+ } else {
+ removeNodes += node
+ }
+ }
+ }
+
+ // Remove nodes with mismatched labels
+ if (removeNodes.nonEmpty && removeNodes.size == outNodeDegree.size())
+ info(s"The entered labels${necessaryLabels} do not match the labels of the node itself")
+ removeNodes.foreach(outNodeDegree.remove(_))
+ return nodeLabelScorer.calculate( inNodeDegree, outNodeDegree, labels).asInstanceOf[util.Map[ScoreServiceInstance, util.List[Label[_]]]]
}
new util.HashMap[ScoreServiceInstance, util.List[Label[_]]]()
}
- private def tryToAddLabel(persistenceLabel: PersistenceLabel): Unit = {
- //1.插入linkis_manager_label 表,这里表应该有唯一约束,key和valueStr 调用Persistence的addLabel即可,忽略duplicateKey异常
- persistenceLabel.setLabelValueSize(persistenceLabel.getValue.size())
- Utils.tryCatch(labelManagerPersistence.addLabel(persistenceLabel)) { t: Throwable =>
- warn(s"Failed to add label ${t.getClass}")
+ private def tryToAddLabel(persistenceLabel: PersistenceLabel): Int = {
+ if (persistenceLabel.getId <= 0) {
+ val label = labelManagerPersistence.getLabelByKeyValue(persistenceLabel.getLabelKey, persistenceLabel.getStringValue)
+ if (null == label) {
+ persistenceLabel.setLabelValueSize(persistenceLabel.getValue.size())
+ Utils.tryCatch(labelManagerPersistence.addLabel(persistenceLabel)) { t: Throwable =>
+ warn(s"Failed to add label ${t.getClass}")
+ }
+ } else {
+ persistenceLabel.setId(label.getId)
+ }
}
+ persistenceLabel.getId
}
+
}
diff --git a/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/service/impl/DefaultResourceLabelService.scala b/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/service/impl/DefaultResourceLabelService.scala
index 57c8f0a..5659181 100644
--- a/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/service/impl/DefaultResourceLabelService.scala
+++ b/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/service/impl/DefaultResourceLabelService.scala
@@ -26,6 +26,7 @@
import com.webank.wedatasphere.linkis.manager.label.builder.factory.LabelBuilderFactoryContext
import com.webank.wedatasphere.linkis.manager.label.entity.Label
import com.webank.wedatasphere.linkis.manager.label.entity.engine.EngineInstanceLabel
+import com.webank.wedatasphere.linkis.manager.label.LabelManagerUtils
import com.webank.wedatasphere.linkis.manager.label.service.ResourceLabelService
import com.webank.wedatasphere.linkis.manager.persistence.ResourceLabelPersistence
import org.springframework.beans.factory.annotation.Autowired
@@ -44,14 +45,7 @@
private val labelBuilderFactory = LabelBuilderFactoryContext.getLabelBuilderFactory
- private def convertPersistenceLabel(label: Label[_]): PersistenceLabel = {
- label match {
- case persistenceLabel: PersistenceLabel =>
- persistenceLabel
- case _ =>
- labelBuilderFactory.convertLabel[PersistenceLabel](label, classOf[PersistenceLabel])
- }
- }
+
/**
* 通过传入的Labels 查找所有和Resource相关的Label
@@ -63,7 +57,7 @@
*/
override def getResourceLabels(labels: util.List[Label[_]]): util.List[Label[_]] = {
val labelKeyValueList = labels.flatMap { label =>
- val persistenceLabel = convertPersistenceLabel(label)
+ val persistenceLabel = LabelManagerUtils.convertPersistenceLabel(label)
persistenceLabel.getValue.map { keyValue =>
new LabelKeyValue(keyValue._1, keyValue._2)
}
@@ -87,7 +81,7 @@
*/
override def setResourceToLabel(label: Label[_], resource: NodeResource): Unit = {
- resourceLabelPersistence.setResourceToLabel(convertPersistenceLabel(label), ResourceUtils.toPersistenceResource(resource))
+ resourceLabelPersistence.setResourceToLabel(LabelManagerUtils.convertPersistenceLabel(label), ResourceUtils.toPersistenceResource(resource))
}
/**
@@ -99,7 +93,7 @@
override def getResourceByLabel(label: Label[_]): NodeResource = {
val persistenceResource = label match {
case p: PersistenceLabel => resourceLabelPersistence.getResourceByLabel(p)
- case _ => resourceLabelPersistence.getResourceByLabel(convertPersistenceLabel(label))
+ case _ => resourceLabelPersistence.getResourceByLabel(LabelManagerUtils.convertPersistenceLabel(label))
}
if(persistenceResource.isEmpty){
null
@@ -117,11 +111,11 @@
* @param label
*/
override def removeResourceByLabel(label: Label[_]): Unit = {
- resourceLabelPersistence.removeResourceByLabel(convertPersistenceLabel(label))
+ resourceLabelPersistence.removeResourceByLabel(LabelManagerUtils.convertPersistenceLabel(label))
}
override def removeResourceByLabels(labels: util.List[Label[_]]): Unit = {
- resourceLabelPersistence.removeResourceByLabels(labels.map(convertPersistenceLabel))
+ resourceLabelPersistence.removeResourceByLabels(labels.map(LabelManagerUtils.convertPersistenceLabel))
}
override def setEngineConnResourceToLabel(label: Label[_], nodeResource: NodeResource): Unit = {
@@ -129,7 +123,7 @@
case label:EngineInstanceLabel =>
val resource = ResourceUtils.toPersistenceResource(nodeResource)
resource.setTicketId(label.getInstance())
- resourceLabelPersistence.setResourceToLabel(convertPersistenceLabel(label), resource)
+ resourceLabelPersistence.setResourceToLabel(LabelManagerUtils.convertPersistenceLabel(label), resource)
case _ =>
}
}
diff --git a/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/service/impl/DefaultUserLabelService.scala b/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/service/impl/DefaultUserLabelService.scala
index 2b29df0..211cc55 100644
--- a/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/service/impl/DefaultUserLabelService.scala
+++ b/linkis-computation-governance/linkis-manager/label-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/label/service/impl/DefaultUserLabelService.scala
@@ -25,6 +25,7 @@
import com.webank.wedatasphere.linkis.manager.label.entity.Label
import com.webank.wedatasphere.linkis.manager.label.exception.LabelErrorException
import com.webank.wedatasphere.linkis.manager.label.service.UserLabelService
+import com.webank.wedatasphere.linkis.manager.label.LabelManagerUtils
import com.webank.wedatasphere.linkis.manager.persistence.LabelManagerPersistence
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
@@ -48,7 +49,7 @@
override def addLabelToUser(user: String, label: Label[_]): Unit = {
//instance 存在
//1.插入linkis_manager_label 表,这里表应该有唯一约束,key和valueStr 调用Persistence的addLabel即可,忽略duplicateKey异常
- val persistenceLabel = labelFactory.convertLabel(label, classOf[PersistenceLabel])
+ val persistenceLabel = LabelManagerUtils.convertPersistenceLabel(label)
Utils.tryAndWarn(labelManagerPersistence.addLabel(persistenceLabel))
//2.查询出当前label的id
val dbLabel = labelManagerPersistence
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/pom.xml b/linkis-computation-governance/linkis-manager/linkis-application-manager/pom.xml
index 6160d4b..9105e43 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/pom.xml
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/assembly/distribution.xml b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/assembly/distribution.xml
index 4b9004d..388afb5 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/assembly/distribution.xml
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/assembly/distribution.xml
@@ -43,31 +43,6 @@
</dependencySets>
<fileSets>
- <fileSet>
- <directory>${basedir}/src/main/resources</directory>
- <includes>
- <include>*</include>
- </includes>
- <fileMode>0777</fileMode>
- <outputDirectory>conf</outputDirectory>
- <lineEnding>unix</lineEnding>
- </fileSet>
- <!--<fileSet>
- <directory>${basedir}/bin</directory>
- <includes>
- <include>*</include>
- </includes>
- <fileMode>0777</fileMode>
- <outputDirectory>bin</outputDirectory>
- <lineEnding>unix</lineEnding>
- </fileSet>
- <fileSet>
- <directory>.</directory>
- <excludes>
- <exclude>*/**</exclude>
- </excludes>
- <outputDirectory>logs</outputDirectory>
- </fileSet>-->
</fileSets>
</assembly>
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/com/webank/wedatasphere/linkis/manager/am/exception/AMErrorCode.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/com/webank/wedatasphere/linkis/manager/am/exception/AMErrorCode.java
index 0d3b6ee..9573452 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/com/webank/wedatasphere/linkis/manager/am/exception/AMErrorCode.java
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/com/webank/wedatasphere/linkis/manager/am/exception/AMErrorCode.java
@@ -26,12 +26,17 @@
UNSUPPORT_VALUE(21002,"unsupport value(不支持的值类型)"),
- PARAM_ERROR(210003,"param error(参数错误)")
+ PARAM_ERROR(210003,"param error(参数错误)"),
+ NOT_EXISTS_ENGINE_CONN(210003,"Not exists EngineConn(不存在的引擎)"),
+
+ AM_CONF_ERROR(210004,"AM configuration error(AM配置错误)")
;
AMErrorCode(int errorCode, String message) {
+ this.code = errorCode;
+ this.message = message;
}
private int code;
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/com/webank/wedatasphere/linkis/manager/am/restful/EngineRestfulApi.java b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/com/webank/wedatasphere/linkis/manager/am/restful/EngineRestfulApi.java
index f5bf74e..446eabf 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/com/webank/wedatasphere/linkis/manager/am/restful/EngineRestfulApi.java
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/java/com/webank/wedatasphere/linkis/manager/am/restful/EngineRestfulApi.java
@@ -15,33 +15,54 @@
* limitations under the License.
*
*/
-
package com.webank.wedatasphere.linkis.manager.am.restful;
import com.webank.wedatasphere.linkis.common.ServiceInstance;
+import com.webank.wedatasphere.linkis.common.utils.ByteTimeUtils;
import com.webank.wedatasphere.linkis.manager.am.conf.AMConfiguration;
import com.webank.wedatasphere.linkis.manager.am.exception.AMErrorCode;
import com.webank.wedatasphere.linkis.manager.am.exception.AMErrorException;
+import com.webank.wedatasphere.linkis.manager.am.service.engine.EngineCreateService;
import com.webank.wedatasphere.linkis.manager.am.service.engine.EngineInfoService;
import com.webank.wedatasphere.linkis.manager.am.utils.AMUtils;
import com.webank.wedatasphere.linkis.manager.am.vo.AMEngineNodeVo;
-import com.webank.wedatasphere.linkis.manager.am.vo.EMNodeVo;
-import com.webank.wedatasphere.linkis.manager.common.entity.enumeration.NodeHealthy;
import com.webank.wedatasphere.linkis.manager.common.entity.enumeration.NodeStatus;
-import com.webank.wedatasphere.linkis.manager.common.entity.metrics.NodeHealthyInfo;
import com.webank.wedatasphere.linkis.manager.common.entity.node.AMEMNode;
import com.webank.wedatasphere.linkis.manager.common.entity.node.EngineNode;
+import com.webank.wedatasphere.linkis.manager.common.protocol.engine.EngineCreateRequest;
+import com.webank.wedatasphere.linkis.manager.common.protocol.engine.EngineStopRequest;
import com.webank.wedatasphere.linkis.manager.label.builder.factory.LabelBuilderFactory;
import com.webank.wedatasphere.linkis.manager.label.builder.factory.LabelBuilderFactoryContext;
-import com.webank.wedatasphere.linkis.manager.label.builder.factory.StdLabelBuilderFactory;
import com.webank.wedatasphere.linkis.manager.label.entity.Label;
import com.webank.wedatasphere.linkis.manager.label.entity.UserModifiable;
import com.webank.wedatasphere.linkis.manager.label.exception.LabelErrorException;
import com.webank.wedatasphere.linkis.manager.label.service.NodeLabelService;
+import com.webank.wedatasphere.linkis.message.builder.MessageJob;
+import com.webank.wedatasphere.linkis.message.publisher.MessagePublisher;
+import com.webank.wedatasphere.linkis.resourcemanager.utils.RMUtils;
import com.webank.wedatasphere.linkis.server.Message;
import com.webank.wedatasphere.linkis.server.security.SecurityFilter;
+import java.io.IOException;
+import java.util.*;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.stream.Collectors;
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang.exception.ExceptionUtils;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;
import org.slf4j.Logger;
@@ -49,17 +70,6 @@
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
-import javax.servlet.http.HttpServletRequest;
-import javax.ws.rs.*;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.io.IOException;
-import java.lang.reflect.Array;
-import java.util.*;
-import java.util.stream.Collectors;
-
-
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
@Component
@@ -70,14 +80,86 @@
private EngineInfoService engineInfoService;
@Autowired
+ private EngineCreateService engineCreateService;
+
+ @Autowired
private NodeLabelService nodeLabelService;
+ @Autowired
+ private MessagePublisher messagePublisher;
+
private final ObjectMapper objectMapper = new ObjectMapper();
private LabelBuilderFactory stdLabelBuilderFactory = LabelBuilderFactoryContext.getLabelBuilderFactory();
private Logger logger = LoggerFactory.getLogger(EMRestfulApi.class);
+ @POST
+ @Path("createEngineConn")
+ public Response createEngineConn(@Context HttpServletRequest req, JsonNode jsonNode)
+ throws IOException, InterruptedException {
+ String userName = SecurityFilter.getLoginUsername(req);
+ EngineCreateRequest engineCreateRequest = objectMapper.readValue(jsonNode, EngineCreateRequest.class);
+ engineCreateRequest.setUser(userName);
+ long timeout = engineCreateRequest.getTimeOut();
+ if(timeout <= 0) {
+ timeout = AMConfiguration.ENGINE_CONN_START_REST_MAX_WAIT_TIME().getValue().toLong();
+ engineCreateRequest.setTimeOut(timeout);
+ }
+ logger.info("User {} try to create a engineConn with maxStartTime {}. EngineCreateRequest is {}.", userName,
+ ByteTimeUtils.msDurationToString(timeout), engineCreateRequest);
+ MessageJob job = messagePublisher.publish(engineCreateRequest);
+ EngineNode engineNode;
+ try {
+ engineNode = (EngineNode) job.get(timeout, TimeUnit.MILLISECONDS);
+ } catch (TimeoutException e) {
+ logger.error(String.format("User %s create engineConn timeout.", userName), e);
+ job.cancel(true);
+ return Message.messageToResponse(Message
+ .error("Create engineConn timeout, usually caused by the too long initialization of EngineConn(创建引擎超时,通常都是因为初始化引擎时间太长导致)."));
+ } catch (ExecutionException e) {
+ logger.error(String.format("User %s create engineConn failed.", userName), e);
+ return Message.messageToResponse(Message
+ .error(String.format("Create engineConn failed, caused by %s.", ExceptionUtils.getRootCauseMessage(e))));
+ }
+ logger.info("Finished to create a engineConn for user {}. NodeInfo is {}.", userName, engineNode);
+        // Convert the EngineNode into a plain map (serviceInstance + nodeStatus) for the JSON response
+ Map<String, Object> retEngineNode = new HashMap<>();
+ retEngineNode.put("serviceInstance", engineNode.getServiceInstance());
+ retEngineNode.put("nodeStatus", engineNode.getNodeStatus().toString());
+ return Message.messageToResponse(Message.ok("create engineConn succeed.").data("engine", retEngineNode));
+ }
+
+
+ @POST
+ @Path("/getEngineConn")
+ public Response getEngineConn(@Context HttpServletRequest req, JsonNode jsonNode) throws AMErrorException {
+ String userName = SecurityFilter.getLoginUsername(req);
+ ServiceInstance serviceInstance = getServiceInstance(jsonNode);
+ EngineNode engineNode = engineCreateService.getEngineNode(serviceInstance);
+ if(!userName.equals(engineNode.getOwner()) && !isAdmin(userName)) {
+ return Message.messageToResponse(Message.error("You have no permission to access EngineConn " + serviceInstance));
+ }
+ return Message.messageToResponse(Message.ok().data("engine", engineNode));
+ }
+
+ @POST
+ @Path("killEngineConn")
+ public Response killEngineConn(@Context HttpServletRequest req, JsonNode jsonNode) throws Exception {
+ String userName = SecurityFilter.getLoginUsername(req);
+ ServiceInstance serviceInstance = getServiceInstance(jsonNode);
+ logger.info("User {} try to kill engineConn {}.", userName, serviceInstance);
+ EngineNode engineNode = engineCreateService.getEngineNode(serviceInstance);
+ if(!userName.equals(engineNode.getOwner()) && !isAdmin(userName)) {
+ return Message.messageToResponse(Message.error("You have no permission to kill EngineConn " + serviceInstance));
+ }
+ EngineStopRequest stopEngineRequest = new EngineStopRequest(serviceInstance, userName);
+ MessageJob job = messagePublisher.publish(stopEngineRequest);
+ job.get(RMUtils.MANAGER_KILL_ENGINE_EAIT().getValue().toLong(), TimeUnit.MILLISECONDS);
+ logger.info("Finished to kill engineConn {}.", serviceInstance);
+ return Message.messageToResponse(Message.ok("Kill engineConn succeed."));
+ }
+
@GET
@Path("/listUserEngines")
public Response listUserEngines(@Context HttpServletRequest req) {
@@ -90,13 +172,13 @@
@Path("/listEMEngines")
public Response listEMEngines(@Context HttpServletRequest req, JsonNode jsonNode) throws IOException, AMErrorException {
String username = SecurityFilter.getLoginUsername(req);
- String[] adminArray = AMConfiguration.GOVERNANCE_STATION_ADMIN().getValue().split(",");
- if(adminArray != null && !Arrays.asList(adminArray).contains(username)){
- throw new AMErrorException(210003,"only admin can search engine information(只有管理员才能查询所有引擎信息)");
+ if(!isAdmin(username)){
+ throw new AMErrorException(210003,"Only admin can search engine information(只有管理员才能查询所有引擎信息).");
}
AMEMNode amemNode = objectMapper.readValue(jsonNode.get("em"), AMEMNode.class);
JsonNode emInstace = jsonNode.get("emInstance");
JsonNode nodeStatus = jsonNode.get("nodeStatus");
+ JsonNode engineType = jsonNode.get("engineType");
JsonNode owner = jsonNode.get("owner");
List<EngineNode> engineNodes = engineInfoService.listEMEngines(amemNode);
ArrayList<AMEngineNodeVo> allengineNodes = AMUtils.copyToAMEngineNodeVo(engineNodes);
@@ -106,35 +188,27 @@
}
ArrayList<AMEngineNodeVo> allEMVoFilter2 = allEMVoFilter1;
if(CollectionUtils.isNotEmpty(allEMVoFilter2) && nodeStatus != null && !StringUtils.isEmpty(nodeStatus.asText())){
- allEMVoFilter2 = (ArrayList<AMEngineNodeVo>) allEMVoFilter2.stream().filter(em -> {return em.getNodeStatus() != null ? em.getNodeStatus().equals(NodeStatus.valueOf(nodeStatus.asText())) : true;}).collect(Collectors.toList());
+ allEMVoFilter2 = (ArrayList<AMEngineNodeVo>) allEMVoFilter2.stream().filter(em -> {return em.getNodeStatus() != null ? em.getNodeStatus().equals(NodeStatus.valueOf(nodeStatus.asText())) : false;}).collect(Collectors.toList());
}
ArrayList<AMEngineNodeVo> allEMVoFilter3 = allEMVoFilter2;
if(CollectionUtils.isNotEmpty(allEMVoFilter3) && owner != null && !StringUtils.isEmpty(owner.asText())){
allEMVoFilter3 = (ArrayList<AMEngineNodeVo>) allEMVoFilter3.stream().filter(em ->{return em.getOwner().equalsIgnoreCase(owner.asText());}).collect(Collectors.toList());
}
- return Message.messageToResponse(Message.ok().data("engines", allEMVoFilter3));
+ ArrayList<AMEngineNodeVo> allEMVoFilter4 = allEMVoFilter3;
+ if(CollectionUtils.isNotEmpty(allEMVoFilter4) && engineType != null && !StringUtils.isEmpty(engineType.asText())){
+ allEMVoFilter4 = (ArrayList<AMEngineNodeVo>) allEMVoFilter4.stream().filter(em ->{return em.getEngineType().equalsIgnoreCase(engineType.asText());}).collect(Collectors.toList());
+ }
+ return Message.messageToResponse(Message.ok().data("engines", allEMVoFilter4));
}
@PUT
@Path("/modifyEngineInfo")
public Response modifyEngineInfo(@Context HttpServletRequest req, JsonNode jsonNode) throws AMErrorException, LabelErrorException {
String username = SecurityFilter.getLoginUsername(req);
- String[] adminArray = AMConfiguration.GOVERNANCE_STATION_ADMIN().getValue().split(",");
- if(adminArray != null && !Arrays.asList(adminArray).contains(username)){
- throw new AMErrorException(210003,"only admin can modify engine information(只有管理员才能修改引擎信息)");
+ if(!isAdmin(username)){
+ throw new AMErrorException(210003,"Only admin can modify engineConn information(只有管理员才能修改引擎信息).");
}
- String applicationName = jsonNode.get("applicationName").asText();
- String instance = jsonNode.get("instance").asText();
- if(StringUtils.isEmpty(applicationName)){
- throw new AMErrorException(AMErrorCode.QUERY_PARAM_NULL.getCode(), "applicationName cannot be null(请求参数applicationName不能为空)");
- }
- if(StringUtils.isEmpty(instance)){
- throw new AMErrorException(AMErrorCode.QUERY_PARAM_NULL.getCode(), "instance cannot be null(请求参数instance不能为空)");
- }
- ServiceInstance serviceInstance = ServiceInstance.apply(applicationName,instance);
- if(serviceInstance == null){
- throw new AMErrorException(AMErrorCode.QUERY_PARAM_NULL.getCode(),"serviceInstance:" + applicationName + " non-existent(服务实例" + applicationName + "不存在)");
- }
+ ServiceInstance serviceInstance = getServiceInstance(jsonNode);
JsonNode labels = jsonNode.get("labels");
Set<String> labelKeySet = new HashSet<>();
if(labels != null){
@@ -152,7 +226,7 @@
newLabelList.add(newLabel);
}
if(labelKeySet.size() != newLabelList.size()){
- throw new AMErrorException(210003, "Failed to update label, include repeat label(更新label失败,包含重复label)");
+ throw new AMErrorException(210003, "Failed to update label, include repeat label(更新label失败,包含重复label)");
}
nodeLabelService.updateLabelsToNode(serviceInstance, newLabelList);
logger.info("success to update label of instance: " + serviceInstance.getInstance());
@@ -166,4 +240,21 @@
NodeStatus[] nodeStatus = NodeStatus.values();
return Message.messageToResponse(Message.ok().data("nodeStatus", nodeStatus));
}
+
+ private boolean isAdmin(String user) {
+ String[] adminArray = AMConfiguration.GOVERNANCE_STATION_ADMIN().getValue().split(",");
+ return ArrayUtils.contains(adminArray, user);
+ }
+
+ private ServiceInstance getServiceInstance(JsonNode jsonNode) throws AMErrorException {
+ String applicationName = jsonNode.get("applicationName").asText();
+ String instance = jsonNode.get("instance").asText();
+ if(StringUtils.isEmpty(applicationName)){
+ throw new AMErrorException(AMErrorCode.QUERY_PARAM_NULL.getCode(), "applicationName cannot be null(请求参数applicationName不能为空)");
+ }
+ if(StringUtils.isEmpty(instance)){
+ throw new AMErrorException(AMErrorCode.QUERY_PARAM_NULL.getCode(), "instance cannot be null(请求参数instance不能为空)");
+ }
+ return ServiceInstance.apply(applicationName,instance);
+ }
}
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/conf/AMConfiguration.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/conf/AMConfiguration.scala
index 971bbc3..776280a 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/conf/AMConfiguration.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/conf/AMConfiguration.scala
@@ -27,6 +27,8 @@
val ENGINE_START_MAX_TIME = CommonVars("wds.linkis.manager.am.engine.start.max.time", new TimeType("10m"))
+ val ENGINE_CONN_START_REST_MAX_WAIT_TIME = CommonVars("wds.linkis.manager.am.engine.rest.start.max.time", new TimeType("40s"))
+
val ENGINE_REUSE_MAX_TIME = CommonVars("wds.linkis.manager.am.engine.reuse.max.time", new TimeType("5m"))
val ENGINE_REUSE_COUNT_LIMIT = CommonVars("wds.linkis.manager.am.engine.reuse.count.limit", 10)
@@ -48,7 +50,11 @@
val ENGINECONN_DEBUG_ENABLED = CommonVars("wds.linkis.engineconn.debug.mode.enable", false)
- val MULTI_USER_ENGINE_TYPES = CommonVars("wds.linkis.multi.user.engine.types", "jdbc,es,presto")
+ val MULTI_USER_ENGINE_TYPES = CommonVars("wds.linkis.multi.user.engine.types", "jdbc,es,presto,io_file")
+
+ val MULTI_USER_ENGINE_USER = CommonVars("wds.linkis.multi.user.engine.user", "{jdbc:\"hadoop\", es: \"hadoop\", presto:\"hadoop\",io_file:\"root\"}")
+
+ val MONITOR_SWITCH_ON = CommonVars("wds.linkis.manager.am.monitor.switch.on", true)
val ENGINE_LOCKER_MAX_TIME = CommonVars("wds.linkis.manager.am.engine.locker.max.time", 1000*60*5)
}
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/label/MultiUserEngineReuseLabelChooser.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/label/MultiUserEngineReuseLabelChooser.scala
index 06a007d..e9ca8c4 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/label/MultiUserEngineReuseLabelChooser.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/label/MultiUserEngineReuseLabelChooser.scala
@@ -15,40 +15,59 @@
* limitations under the License.
*
*/
-
package com.webank.wedatasphere.linkis.manager.am.label
import java.util
import com.webank.wedatasphere.linkis.common.utils.Logging
import com.webank.wedatasphere.linkis.manager.am.conf.AMConfiguration
+import com.webank.wedatasphere.linkis.manager.am.exception.{AMErrorCode, AMErrorException}
import com.webank.wedatasphere.linkis.manager.label.entity.Label
import com.webank.wedatasphere.linkis.manager.label.entity.engine.{EngineTypeLabel, UserCreatorLabel}
+import com.webank.wedatasphere.linkis.server.BDPJettyServerHelper
+import org.apache.commons.lang.StringUtils
import org.springframework.stereotype.Component
import scala.collection.JavaConverters._
-
@Component
class MultiUserEngineReuseLabelChooser extends EngineReuseLabelChooser with Logging {
private val multiUserEngine = AMConfiguration.MULTI_USER_ENGINE_TYPES.getValue.split(",")
+ private val userMap = getMultiUserEngineUserMap()
+
+ def getMultiUserEngineUserMap(): util.Map[String, String] = {
+ val userJson = AMConfiguration.MULTI_USER_ENGINE_USER.getValue
+ if (StringUtils.isNotBlank(userJson)) {
+ val userMap = BDPJettyServerHelper.gson.fromJson(userJson, classOf[java.util.Map[String, String]])
+ userMap
+ } else {
+ throw new AMErrorException(AMErrorCode.AM_CONF_ERROR.getCode, s"Multi-user engine parameter configuration error,please check key ${AMConfiguration.MULTI_USER_ENGINE_USER.key}")
+ }
+
+ }
+
/**
- * 过滤掉支持多用户引擎的UserCreator Label
- *
- * @param labelList
- * @return
- */
+ * Filter out UserCreator Label that supports multi-user engine
+ * 过滤掉支持多用户引擎的UserCreator Label
+ *
+ * @param labelList
+ * @return
+ */
override def chooseLabels(labelList: util.List[Label[_]]): util.List[Label[_]] = {
val labels = labelList.asScala
val engineTypeLabelOption = labels.find(_.isInstanceOf[EngineTypeLabel])
if (engineTypeLabelOption.isDefined) {
val engineTypeLabel = engineTypeLabelOption.get.asInstanceOf[EngineTypeLabel]
val maybeString = multiUserEngine.find(_.equalsIgnoreCase(engineTypeLabel.getEngineType))
- if (maybeString.isDefined) {
- info("For multi user engine remove userCreatorLabel")
- return labels.filterNot(_.isInstanceOf[UserCreatorLabel]).asJava
+ val userCreatorLabelOption = labels.find(_.isInstanceOf[UserCreatorLabel])
+ if (maybeString.isDefined && userCreatorLabelOption.isDefined) {
+ val userAdmin = userMap.get(engineTypeLabel.getEngineType)
+ val userCreatorLabel = userCreatorLabelOption.get.asInstanceOf[UserCreatorLabel]
+ info(s"For multi user engine to reset userCreatorLabel user ${userCreatorLabel.getUser} to Admin $userAdmin ")
+ userCreatorLabel.setUser(userAdmin)
+ return labels.asJava
}
}
labelList
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/manager/DefaultEMNodeManager.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/manager/DefaultEMNodeManager.scala
index bac0ef3..0b28418 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/manager/DefaultEMNodeManager.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/manager/DefaultEMNodeManager.scala
@@ -149,13 +149,13 @@
}
override def deleteEM(emNode: EMNode): Unit = {
-
nodeManagerPersistence.removeNodeInstance(emNode)
- info("Finished to clear emNode instance info")
+ info(s"Finished to clear emNode instance(${emNode.getServiceInstance}) info ")
nodeMetricManagerPersistence.deleteNodeMetrics(emNode)
- info("Finished to clear emNode metrics info")
+ info(s"Finished to clear emNode(${emNode.getServiceInstance}) metrics info")
}
+
override def pauseEM(serviceInstance: ServiceInstance): Unit = {
}
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/manager/DefaultEngineNodeManager.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/manager/DefaultEngineNodeManager.scala
index bad36bb..3d14c38 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/manager/DefaultEngineNodeManager.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/manager/DefaultEngineNodeManager.scala
@@ -66,6 +66,8 @@
@Autowired
private var labelManagerPersistence: LabelManagerPersistence = _
+ private val labelBuilderFactory = LabelBuilderFactoryContext.getLabelBuilderFactory
+
override def listEngines(user: String): util.List[EngineNode] = {
// TODO: user 应该是除了root,hadoop
val nodes = nodeManagerPersistence.getNodes(user).map(_.getServiceInstance).map(nodeManagerPersistence.getEngineNode)
@@ -217,7 +219,7 @@
*/
override def deleteEngineNode(engineNode: EngineNode): Unit = {
nodeManagerPersistence.deleteEngineNode(engineNode)
- nodeMetricManagerPersistence.deleteNodeMetrics(engineNode)
+
}
override def getEngineNode(serviceInstance: ServiceInstance): EngineNode = {
@@ -230,7 +232,6 @@
//2.update serviceInstance 表,包括 instance替换,替换mark,owner,updator,creator的空值,更新updateTime
//3.update engine_em关联表
//4.update label ticket_id ==> instance
- val labelBuilderFactory = LabelBuilderFactoryContext.getLabelBuilderFactory
val engineLabel = labelBuilderFactory.createLabel(classOf[EngineInstanceLabel])
engineLabel.setInstance(engineNode.getServiceInstance.getInstance)
engineLabel.setServiceName(engineNode.getServiceInstance.getApplicationName)
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/cache/DefaultConfCacheService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/cache/DefaultConfCacheService.scala
new file mode 100644
index 0000000..172240a
--- /dev/null
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/cache/DefaultConfCacheService.scala
@@ -0,0 +1,49 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+package com.webank.wedatasphere.linkis.manager.am.service.cache
+
+import com.webank.wedatasphere.linkis.common.utils.Logging
+import com.webank.wedatasphere.linkis.governance.common.protocol.conf.{RequestQueryEngineConfig, RequestQueryGlobalConfig}
+import com.webank.wedatasphere.linkis.manager.common.protocol.conf.RemoveCacheConfRequest
+import com.webank.wedatasphere.linkis.message.annotation.Receiver
+import com.webank.wedatasphere.linkis.rpc.interceptor.common.CacheableRPCInterceptor
+import org.springframework.beans.factory.annotation.Autowired
+import org.springframework.stereotype.Service
+
+@Service
+class DefaultConfCacheService extends Logging {
+
+ @Autowired
+ private var cacheableRPCInterceptor: CacheableRPCInterceptor = _
+
+ @Receiver
+ def removeCacheConfiguration(removeCacheConfRequest: RemoveCacheConfRequest): Boolean = {
+ if (removeCacheConfRequest.getUserCreatorLabel != null) {
+ if (removeCacheConfRequest.getEngineTypeLabel != null) {
+ val request = RequestQueryEngineConfig(removeCacheConfRequest.getUserCreatorLabel, removeCacheConfRequest.getEngineTypeLabel)
+ cacheableRPCInterceptor.removeCache(request.toString)
+ info(s"success to clear cache about configuration of ${removeCacheConfRequest.getEngineTypeLabel.getStringValue}-${removeCacheConfRequest.getEngineTypeLabel.getStringValue}")
+ } else {
+ val request = RequestQueryGlobalConfig(removeCacheConfRequest.getUserCreatorLabel.getUser)
+ cacheableRPCInterceptor.removeCache(request.toString)
+ info(s"success to clear cache about global configuration of ${removeCacheConfRequest.getUserCreatorLabel.getUser}")
+ }
+ }
+ true
+ }
+}
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/em/DefaultEMEngineService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/em/DefaultEMEngineService.scala
index 12b7e0b..98bc7d8 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/em/DefaultEMEngineService.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/em/DefaultEMEngineService.scala
@@ -67,7 +67,7 @@
info(s"EM ${emNode.getServiceInstance} start to create Engine ${engineBuildRequest}")
val engineNode = emNodeManager.createEngine(engineBuildRequest, emNode)
- info(s"EM ${emNode.getServiceInstance} Finished to create Engine ${engineBuildRequest}")
+ info(s"EM ${emNode.getServiceInstance} Finished to create Engine ${engineBuildRequest.ticketId}")
engineNode.setLabels(emNode.getLabels.filter(_.isInstanceOf[EngineNodeLabel]))
engineNode.setEMNode(emNode)
engineNode
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/em/DefaultEMRegisterService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/em/DefaultEMRegisterService.scala
index c614b9b..fc6485e 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/em/DefaultEMRegisterService.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/em/DefaultEMRegisterService.scala
@@ -76,7 +76,7 @@
if (null == emRegister.getLabels) {
emRegister.setLabels(new util.HashMap[String, Object]())
}
- emRegister.getLabels.put(eMInstanceLabel.getLabelKey, eMInstanceLabel.getValue)
+ emRegister.getLabels.put(eMInstanceLabel.getLabelKey, eMInstanceLabel.getStringValue)
val instanceLabelAddRequest = new NodeLabelAddRequest(emRegister.getServiceInstance, emRegister.getLabels)
info(s"Start to publish em{${emRegister.getServiceInstance}} label request to Label ")
val job = publisher.publish(instanceLabelAddRequest)
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/em/DefaultEMUnregisterService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/em/DefaultEMUnregisterService.scala
index b6a8f7d..bde5f33 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/em/DefaultEMUnregisterService.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/em/DefaultEMUnregisterService.scala
@@ -15,7 +15,6 @@
* limitations under the License.
*
*/
-
package com.webank.wedatasphere.linkis.manager.am.service.em
import java.util.concurrent.TimeUnit
@@ -31,7 +30,6 @@
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
-
@Service
class DefaultEMUnregisterService extends EMUnregisterService with Logging {
@@ -55,11 +53,12 @@
emClearRequest.setEm(node)
emClearRequest.setUser(stopEMRequest.getUser)
val job = smc.publish(emClearRequest)
+ Utils.tryAndWarn(job.get(AMConfiguration.STOP_ENGINE_WAIT.getValue.toLong, TimeUnit.MILLISECONDS))
// clear Label
val instanceLabelRemoveRequest = new NodeLabelRemoveRequest(node.getServiceInstance, false)
val labelJob = smc.publish(instanceLabelRemoveRequest)
- Utils.tryAndWarn(job.get(AMConfiguration.STOP_ENGINE_WAIT.getValue.toLong, TimeUnit.MILLISECONDS))
Utils.tryAndWarn(labelJob.get(AMConfiguration.STOP_ENGINE_WAIT.getValue.toLong, TimeUnit.MILLISECONDS))
+    // Clear the ECM instance info first and only then wait, so a too-fast ECM restart cannot leave ECM resources uncleaned (此处需要先清理ECM再等待,避免ECM重启过快,导致ECM资源没清理干净)
clearEMInstanceInfo(emClearRequest)
info(s" user ${stopEMRequest.getUser} finished to stop em ${stopEMRequest.getEm}")
}
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/engine/DefaultEngineAskEngineService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/engine/DefaultEngineAskEngineService.scala
index 44fc41d..990162f 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/engine/DefaultEngineAskEngineService.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/engine/DefaultEngineAskEngineService.scala
@@ -15,7 +15,6 @@
* limitations under the License.
*
*/
-
package com.webank.wedatasphere.linkis.manager.am.service.engine
import java.util.concurrent.atomic.AtomicInteger
@@ -25,6 +24,7 @@
import com.webank.wedatasphere.linkis.manager.am.conf.AMConfiguration
import com.webank.wedatasphere.linkis.manager.common.constant.AMConstant
import com.webank.wedatasphere.linkis.manager.common.protocol.engine._
+import com.webank.wedatasphere.linkis.manager.label.constant.LabelKeyConstant
import com.webank.wedatasphere.linkis.message.annotation.Receiver
import com.webank.wedatasphere.linkis.message.builder.ServiceMethodContext
import com.webank.wedatasphere.linkis.rpc.Sender
@@ -36,7 +36,6 @@
import scala.concurrent._
import scala.util.{Failure, Success}
-
@Service
class DefaultEngineAskEngineService extends AbstractEngineService with EngineAskEngineService with Logging {
@@ -55,21 +54,27 @@
@Receiver
override def askEngine(engineAskRequest: EngineAskRequest, smc: ServiceMethodContext): Any = {
-
- if(!engineAskRequest.getLabels.containsKey("executeOnce")){
+ info(s"received engineAskRequest $engineAskRequest")
+ if(! engineAskRequest.getLabels.containsKey(LabelKeyConstant.EXECUTE_ONCE_KEY)){
val engineReuseRequest = new EngineReuseRequest()
engineReuseRequest.setLabels(engineAskRequest.getLabels)
engineReuseRequest.setTimeOut(engineAskRequest.getTimeOut)
engineReuseRequest.setUser(engineAskRequest.getUser)
- val reuseNode = Utils.tryAndWarn(engineReuseService.reuseEngine(engineReuseRequest))
+ val reuseNode = Utils.tryCatch(engineReuseService.reuseEngine(engineReuseRequest)) {
+ t: Throwable =>
+ warn(s"user ${engineAskRequest.getUser} reuse engine failed ${t.getMessage}")
+ null
+ }
if (null != reuseNode) {
info(s"Finished to ask engine for user ${engineAskRequest.getUser} by reuse node $reuseNode")
return reuseNode
}
}
+ val engineAskAsyncId = getAsyncId
val createNodeThread = Future {
+ info(s"Start to async($engineAskAsyncId) createEngine")
//如果原来的labels含engineInstance ,先去掉
engineAskRequest.getLabels.remove("engineInstance")
val engineCreateRequest = new EngineCreateRequest
@@ -83,12 +88,12 @@
//useEngine 需要加上超时
val createEngineNode = getEngineNodeManager.useEngine(createNode, timeout)
if (null == createEngineNode)
- throw new LinkisRetryException(AMConstant.EM_ERROR_CODE, s"create engine${createNode.getServiceInstance} success, but to use engine failed")
+ throw new LinkisRetryException(AMConstant.EM_ERROR_CODE, s"create engine${createNode.getServiceInstance} success, but to use engine failed")
info(s"Finished to ask engine for user ${engineAskRequest.getUser} by create node $createEngineNode")
createEngineNode
}
- val engineAskAsyncId = getAsyncId
+
createNodeThread.onComplete {
case Success(engineNode) =>
info(s"Success to async($engineAskAsyncId) createEngine $engineNode")
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/engine/DefaultEngineConnStatusCallbackService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/engine/DefaultEngineConnStatusCallbackService.scala
index 09a8b9d..192dde6 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/engine/DefaultEngineConnStatusCallbackService.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/engine/DefaultEngineConnStatusCallbackService.scala
@@ -15,7 +15,6 @@
* limitations under the License.
*
*/
-
package com.webank.wedatasphere.linkis.manager.am.service.engine
import java.util
@@ -31,7 +30,6 @@
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
-
@Service
class DefaultEngineConnStatusCallbackService extends EngineConnStatusCallbackService with Logging {
@@ -46,8 +44,11 @@
info(s"Start to deal engineConnStatusCallbackToAM $engineConnStatusCallbackToAM")
val nodeMetrics = new AMNodeMetrics
- val heartBeatMsg: java.util.Map[String, String] = new util.HashMap[String, String]()
+ val heartBeatMsg: java.util.Map[String, Any] = new util.HashMap[String, Any]()
heartBeatMsg.put(AMConstant.START_REASON, engineConnStatusCallbackToAM.initErrorMsg)
+ if (engineConnStatusCallbackToAM.canRetry) {
+ heartBeatMsg.put(AMConstant.EC_CAN_RETRY, engineConnStatusCallbackToAM.canRetry)
+ }
nodeMetrics.setHeartBeatMsg(BDPJettyServerHelper.jacksonJson.writeValueAsString(heartBeatMsg))
nodeMetrics.setServiceInstance(engineConnStatusCallbackToAM.serviceInstance)
nodeMetrics.setStatus(metricsConverter.convertStatus(engineConnStatusCallbackToAM.status))
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/engine/DefaultEngineCreateService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/engine/DefaultEngineCreateService.scala
index 15a8137..747aa32 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/engine/DefaultEngineCreateService.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/engine/DefaultEngineCreateService.scala
@@ -15,7 +15,6 @@
* limitations under the License.
*
*/
-
package com.webank.wedatasphere.linkis.manager.am.service.engine
@@ -24,33 +23,36 @@
import com.webank.wedatasphere.linkis.common.ServiceInstance
import com.webank.wedatasphere.linkis.common.exception.LinkisRetryException
-import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
+import com.webank.wedatasphere.linkis.common.utils.{ByteTimeUtils, Logging, Utils}
import com.webank.wedatasphere.linkis.governance.common.conf.GovernanceCommonConf
import com.webank.wedatasphere.linkis.governance.common.conf.GovernanceCommonConf.ENGINE_CONN_MANAGER_SPRING_NAME
-import com.webank.wedatasphere.linkis.manager.am.conf.{AMConfiguration, EngineConnConfigurationService}
-import com.webank.wedatasphere.linkis.manager.am.exception.AMErrorException
+import com.webank.wedatasphere.linkis.governance.common.protocol.conf.{RequestQueryEngineConfig, RequestQueryGlobalConfig}
+import com.webank.wedatasphere.linkis.manager.am.conf.{AMConfiguration, ConfigurationMapCache, EngineConnConfigurationService}
+import com.webank.wedatasphere.linkis.manager.am.exception.{AMErrorCode, AMErrorException}
import com.webank.wedatasphere.linkis.manager.am.pointer.EngineConnPluginPointer
import com.webank.wedatasphere.linkis.manager.am.selector.NodeSelector
import com.webank.wedatasphere.linkis.manager.common.constant.AMConstant
import com.webank.wedatasphere.linkis.manager.common.entity.enumeration.NodeStatus
import com.webank.wedatasphere.linkis.manager.common.entity.node.{EMNode, EngineNode}
import com.webank.wedatasphere.linkis.manager.common.entity.resource.NodeResource
+import com.webank.wedatasphere.linkis.manager.common.protocol.conf.RemoveCacheConfRequest
import com.webank.wedatasphere.linkis.manager.common.protocol.engine.{EngineCreateRequest, EngineStopRequest}
import com.webank.wedatasphere.linkis.manager.common.utils.ManagerUtils
import com.webank.wedatasphere.linkis.manager.engineplugin.common.launch.entity.{EngineConnBuildRequestImpl, EngineConnCreationDescImpl}
import com.webank.wedatasphere.linkis.manager.engineplugin.common.resource.TimeoutEngineResourceRequest
import com.webank.wedatasphere.linkis.manager.label.builder.factory.LabelBuilderFactoryContext
-import com.webank.wedatasphere.linkis.manager.label.entity.engine.EngineTypeLabel
+import com.webank.wedatasphere.linkis.manager.label.entity.engine.{EngineInstanceLabel, EngineTypeLabel}
import com.webank.wedatasphere.linkis.manager.label.entity.node.AliasServiceInstanceLabel
import com.webank.wedatasphere.linkis.manager.label.entity.{EngineNodeLabel, Label}
import com.webank.wedatasphere.linkis.manager.label.service.{NodeLabelService, UserLabelService}
import com.webank.wedatasphere.linkis.manager.label.utils.LabelUtils
-import com.webank.wedatasphere.linkis.manager.persistence.NodeMetricManagerPersistence
+import com.webank.wedatasphere.linkis.manager.persistence.{NodeMetricManagerPersistence, ResourceManagerPersistence}
import com.webank.wedatasphere.linkis.manager.service.common.label.{LabelChecker, LabelFilter}
import com.webank.wedatasphere.linkis.message.annotation.Receiver
import com.webank.wedatasphere.linkis.message.builder.ServiceMethodContext
import com.webank.wedatasphere.linkis.resourcemanager.service.ResourceManager
import com.webank.wedatasphere.linkis.resourcemanager.{AvailableResource, NotEnoughResource}
+import com.webank.wedatasphere.linkis.rpc.interceptor.common.CacheableRPCInterceptor
import com.webank.wedatasphere.linkis.server.BDPJettyServerHelper
import org.apache.commons.lang.StringUtils
import org.springframework.beans.factory.annotation.Autowired
@@ -59,8 +61,6 @@
import scala.collection.JavaConversions._
import scala.concurrent.duration.Duration
-
-
@Service
class DefaultEngineCreateService extends AbstractEngineService with EngineCreateService with Logging {
@@ -92,15 +92,37 @@
@Autowired
private var engineConnPluginPointer: EngineConnPluginPointer = _
-
@Autowired
private var nodeMetricManagerPersistence: NodeMetricManagerPersistence = _
+ @Autowired
+ private var resourceManagerPersistence: ResourceManagerPersistence = _
+
+ def getEngineNode(serviceInstance: ServiceInstance): EngineNode = {
+ val engineNode = getEngineNodeManager.getEngineNode(serviceInstance)
+ if (engineNode != null) {
+ if (engineNode.getNodeStatus == null) engineNode.setNodeStatus(NodeStatus.values()(nodeMetricManagerPersistence.getNodeMetrics(engineNode).getStatus))
+ return engineNode
+ }
+ val labels = resourceManagerPersistence.getLabelsByTicketId(serviceInstance.getInstance)
+ labels.foreach { label =>
+ LabelBuilderFactoryContext.getLabelBuilderFactory.createLabel[Label[_]](label.getLabelKey, label.getStringValue) match {
+ case engineInstanceLabel: EngineInstanceLabel =>
+ val serviceInstance = ServiceInstance(engineInstanceLabel.getServiceName, engineInstanceLabel.getInstance)
+ return getEngineNodeManager.getEngineNode(serviceInstance)
+ case _ =>
+ }
+ }
+ throw new AMErrorException(AMErrorCode.NOT_EXISTS_ENGINE_CONN.getCode, AMErrorCode.NOT_EXISTS_ENGINE_CONN.getMessage)
+ }
+
+
+
@Receiver
@throws[LinkisRetryException]
override def createEngine(engineCreateRequest: EngineCreateRequest, smc: ServiceMethodContext): EngineNode = {
-
- info(s"Start to create Engine for request: $engineCreateRequest")
+ val startTime = System.currentTimeMillis
+ info(s"Start to create Engine for request: $engineCreateRequest.")
val labelBuilderFactory = LabelBuilderFactoryContext.getLabelBuilderFactory
val timeout = if (engineCreateRequest.getTimeOut <= 0) AMConfiguration.ENGINE_START_MAX_TIME.getValue.toLong else engineCreateRequest.getTimeOut
@@ -124,11 +146,11 @@
//3. 执行Select 比如负载过高,返回没有负载低的EM,每个规则如果返回为空就抛出异常
val choseNode = if (null == emScoreNodeList || emScoreNodeList.isEmpty) null else nodeSelector.choseNode(emScoreNodeList.toArray)
if (null == choseNode || choseNode.isEmpty) {
- throw new LinkisRetryException(AMConstant.EM_ERROR_CODE, s"The em of labels${engineCreateRequest.getLabels} not found")
+ throw new LinkisRetryException(AMConstant.EM_ERROR_CODE, s" The em of labels${engineCreateRequest.getLabels} not found")
}
val emNode = choseNode.get.asInstanceOf[EMNode]
//4. 请求资源
- val (resourceTicketId, resource) = requestResource(engineCreateRequest, labelList, emNode, timeout)
+ val (resourceTicketId, resource) = requestResource(engineCreateRequest, labelFilter.choseEngineLabel(labelList), emNode, timeout)
//5. 封装engineBuildRequest对象,并发送给EM进行执行
val engineBuildRequest = EngineConnBuildRequestImpl(
@@ -138,9 +160,8 @@
EngineConnCreationDescImpl(engineCreateRequest.getCreateService, engineCreateRequest.getDescription, engineCreateRequest.getProperties))
//6. 调用EM发送引擎启动请求调用ASK TODO 异常和等待时间处理
- info("start to request ecm create engineConn")
val engineNode = getEMService().createEngine(engineBuildRequest, emNode)
- info(s"Finished to create engineConn $engineNode")
+ info(s"Finished to create engineConn $engineNode. ticketId is $resourceTicketId")
//7. 更新持久化信息:包括插入engine/metrics
//AM会更新serviceInstance表 需要将ticketID进行替换,并更新 EngineConn的Label 需要修改EngineInstanceLabel 中的id为Instance信息
val oldServiceInstance = new ServiceInstance
@@ -148,23 +169,38 @@
oldServiceInstance.setInstance(resourceTicketId)
getEngineNodeManager.updateEngineNode(oldServiceInstance, engineNode)
- //8. Add EngineConn's Label, and add engineConn's AliasLabel
+ //8. 新增 EngineConn的Label,添加engineConn的Alias
val engineConnAliasLabel = labelBuilderFactory.createLabel(classOf[AliasServiceInstanceLabel])
engineConnAliasLabel.setAlias(GovernanceCommonConf.ENGINE_CONN_SPRING_NAME.getValue)
labelList.add(engineConnAliasLabel)
- nodeLabelService.addLabelsToNode(engineNode.getServiceInstance, LabelUtils.distinctLabel(labelList, fromEMGetEngineLabels(emNode.getLabels)))
- Utils.tryCatch{
- info(s"Start to wait idle status of engineConn($engineNode) ")
+ nodeLabelService.addLabelsToNode(engineNode.getServiceInstance, labelFilter.choseEngineLabel(LabelUtils.distinctLabel(labelList, fromEMGetEngineLabels(emNode.getLabels))))
+ if(System.currentTimeMillis - startTime >= timeout && engineCreateRequest.isIgnoreTimeout) {
+ info(s"Return a EngineConn $engineNode for request: $engineCreateRequest since the creator set ignoreTimeout=true and maxStartTime is reached.")
+ return engineNode
+ }
+ Utils.tryCatch {
+ val leftWaitTime = timeout - (System.currentTimeMillis - startTime)
+ info(s"Start to wait engineConn($engineNode) to be available, but only ${ByteTimeUtils.msDurationToString(leftWaitTime)} left.")
//9 获取启动的引擎信息,并等待引擎的状态变为IDLE,如果等待超时则返回给用户,并抛出异常
- Utils.waitUntil(() => ensuresIdle(engineNode), Duration(timeout, TimeUnit.MILLISECONDS))
- }{
- case e: TimeoutException =>
- info(s"Waiting for $engineNode initialization failure , now stop it")
+ Utils.waitUntil(() => ensuresIdle(engineNode, resourceTicketId), Duration(leftWaitTime, TimeUnit.MILLISECONDS))
+ } {
+ case _: TimeoutException =>
+ if(!engineCreateRequest.isIgnoreTimeout) {
+ info(s"Waiting for $engineNode initialization TimeoutException , now stop it.")
+ val stopEngineRequest = new EngineStopRequest(engineNode.getServiceInstance, ManagerUtils.getAdminUser)
+ smc.publish(stopEngineRequest)
+ throw new LinkisRetryException(AMConstant.ENGINE_ERROR_CODE, s"Waiting for Engine initialization failure, already waiting $timeout ms TicketId ${resourceTicketId}")
+ } else {
+ warn(s"Waiting for $engineNode initialization TimeoutException, ignore this exception since the creator set ignoreTimeout=true.")
+ return engineNode
+ }
+ case t: Throwable =>
+ info(s"Waiting for $engineNode initialization failure , now stop it.")
val stopEngineRequest = new EngineStopRequest(engineNode.getServiceInstance, ManagerUtils.getAdminUser)
smc.publish(stopEngineRequest)
- throw new LinkisRetryException(AMConstant.ENGINE_ERROR_CODE, s"Waiting for Engine initialization failure, already waiting $timeout ms")
+ throw t
}
- info(s"Finished to create Engine for request: $engineCreateRequest and get engineNode $engineNode")
+ info(s"Finished to create Engine for request: $engineCreateRequest and get engineNode $engineNode.")
engineNode
}
@@ -191,8 +227,8 @@
case AvailableResource(ticketId) =>
(ticketId, resource)
case NotEnoughResource(reason) =>
- warn(s"Insufficient user resources please retry( 用户资源不足,请重试: $reason)")
- throw new LinkisRetryException(AMConstant.EM_ERROR_CODE, s"Insufficient user resources please retry( 用户资源不足,请重试: $reason)")
+ warn(s"资源不足,请重试: $reason")
+ throw new LinkisRetryException(AMConstant.EM_ERROR_CODE, s"资源不足,请重试: $reason")
}
}
@@ -202,28 +238,36 @@
}
}
- private def ensuresIdle(engineNode: EngineNode): Boolean = {
+ private def ensuresIdle(engineNode: EngineNode, resourceTicketId: String): Boolean = {
//TODO 逻辑需要修改,修改为engineConn主动上报
val engineNodeInfo = Utils.tryAndWarnMsg(getEngineNodeManager.getEngineNodeInfoByDB(engineNode))("Failed to from db get engine node info")
if (null == engineNodeInfo) return false
if (NodeStatus.isCompleted(engineNodeInfo.getNodeStatus)) {
val metrics = nodeMetricManagerPersistence.getNodeMetrics(engineNodeInfo)
- val reason = getStartErrorInfo(metrics.getHeartBeatMsg)
- throw new AMErrorException(AMConstant.EM_ERROR_CODE, s"failed to init engine .reason:${reason} (初始化引擎失败,原因: ${reason})")
+ val (reason, canRetry) = getStartErrorInfo(metrics.getHeartBeatMsg)
+ if(canRetry.isDefined) {
+ throw new LinkisRetryException(AMConstant.ENGINE_ERROR_CODE, s"${engineNode.getServiceInstance} ticketID:$resourceTicketId 初始化引擎失败,原因: ${reason}")
+ //throw new AMErrorException(AMConstant.EM_ERROR_CODE, s"初始化引擎失败,原因: ${reason}")
+ }
+ throw new AMErrorException(AMConstant.EM_ERROR_CODE, s"${engineNode.getServiceInstance} ticketID:$resourceTicketId 初始化引擎失败,原因: ${reason}")
}
NodeStatus.isAvailable(engineNodeInfo.getNodeStatus)
}
- private def getStartErrorInfo(msg: String): String = {
+ private def getStartErrorInfo(msg: String): (String, Option[Boolean]) = {
if (StringUtils.isNotBlank(msg)) {
val jsonNode = BDPJettyServerHelper.jacksonJson.readTree(msg)
if (jsonNode != null && jsonNode.has(AMConstant.START_REASON)) {
val startReason = jsonNode.get(AMConstant.START_REASON).asText()
- return startReason
+ if (jsonNode.has(AMConstant.EC_CAN_RETRY)) {
+ return (startReason, Some(true))
+ } else {
+ return (startReason, None)
+ }
}
}
- null
+ (null, None)
}
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/engine/DefaultEngineReuseService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/engine/DefaultEngineReuseService.scala
index be87da7..e10b1e7 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/engine/DefaultEngineReuseService.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/engine/DefaultEngineReuseService.scala
@@ -15,7 +15,6 @@
* limitations under the License.
*
*/
-
package com.webank.wedatasphere.linkis.manager.am.service.engine
import java.util
@@ -32,7 +31,7 @@
import com.webank.wedatasphere.linkis.manager.common.entity.node.EngineNode
import com.webank.wedatasphere.linkis.manager.common.protocol.engine.EngineReuseRequest
import com.webank.wedatasphere.linkis.manager.label.builder.factory.LabelBuilderFactoryContext
-import com.webank.wedatasphere.linkis.manager.label.entity.Label
+import com.webank.wedatasphere.linkis.manager.label.entity.{EngineNodeLabel, Label}
import com.webank.wedatasphere.linkis.manager.label.entity.engine.ReuseExclusionLabel
import com.webank.wedatasphere.linkis.manager.label.entity.node.AliasServiceInstanceLabel
import com.webank.wedatasphere.linkis.manager.label.service.{NodeLabelService, UserLabelService}
@@ -44,7 +43,6 @@
import scala.collection.JavaConversions._
import scala.concurrent.duration.Duration
-
@Service
class DefaultEngineReuseService extends AbstractEngineService with EngineReuseService with Logging {
@@ -69,6 +67,9 @@
//Label:传入的Label和用户默认的Label 去重
var labelList: util.List[Label[_]] = LabelUtils.distinctLabel(labelBuilderFactory.getLabels(engineReuseRequest.getLabels),
userLabelService.getUserLabels(engineReuseRequest.getUser))
+
+ labelList = labelList.filter(_.isInstanceOf[EngineNodeLabel])
+
val engineConnAliasLabel = labelBuilderFactory.createLabel(classOf[AliasServiceInstanceLabel])
engineConnAliasLabel.setAlias(GovernanceCommonConf.ENGINE_CONN_SPRING_NAME.getValue)
labelList.add(engineConnAliasLabel)
@@ -79,6 +80,7 @@
labelList = chooser.chooseLabels(labelList)
}
}
+
val instances = nodeLabelService.getScoredNodeMapsByLabels(labelList)
if (null == instances || instances.isEmpty) {
throw new LinkisRetryException(AMConstant.ENGINE_ERROR_CODE, s"No engine can be reused")
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/engine/DefaultEngineStopService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/engine/DefaultEngineStopService.scala
index 7181e29..25f5d97 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/engine/DefaultEngineStopService.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/engine/DefaultEngineStopService.scala
@@ -15,7 +15,6 @@
* limitations under the License.
*
*/
-
package com.webank.wedatasphere.linkis.manager.am.service.engine
import java.util.concurrent.TimeUnit
@@ -31,7 +30,6 @@
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
-
@Service
class DefaultEngineStopService extends AbstractEngineService with EngineStopService with Logging {
@@ -40,6 +38,7 @@
@Receiver
override def stopEngine(engineStopRequest: EngineStopRequest, smc: ServiceMethodContext): Unit = {
+ //TODO delete
engineStopRequest.getServiceInstance.setApplicationName(GovernanceCommonConf.ENGINE_CONN_SPRING_NAME.getValue)
info(s" user ${engineStopRequest.getUser} prepare to stop engine ${engineStopRequest.getServiceInstance}")
val node = getEngineNodeManager.getEngineNode(engineStopRequest.getServiceInstance)
@@ -54,13 +53,13 @@
engineInfoClearRequest.setUser(engineStopRequest.getUser)
val job = smc.publish(engineInfoClearRequest)
Utils.tryAndWarn(job.get(AMConfiguration.STOP_ENGINE_WAIT.getValue.toLong, TimeUnit.MILLISECONDS))
- info(s"Finished to clear RM info and stop Engine")
+ info(s"Finished to clear RM info and stop Engine $node")
// clear Label
val instanceLabelRemoveRequest = new NodeLabelRemoveRequest(node.getServiceInstance, true)
val labelJob = smc.publish(instanceLabelRemoveRequest)
Utils.tryAndWarn(labelJob.get(AMConfiguration.STOP_ENGINE_WAIT.getValue.toLong, TimeUnit.MILLISECONDS))
- info(s"Finished to clear Label info")
+ info(s"Finished to clear engineNode $node Label info")
getEngineNodeManager.deleteEngineNode(node)
info(s" user ${engineStopRequest.getUser} finished to stop engine ${engineStopRequest.getServiceInstance}")
}
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/engine/EngineCreateService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/engine/EngineCreateService.scala
index 2cd51ef..b36caa9 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/engine/EngineCreateService.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/engine/EngineCreateService.scala
@@ -15,18 +15,20 @@
* limitations under the License.
*
*/
-
package com.webank.wedatasphere.linkis.manager.am.service.engine
+import com.webank.wedatasphere.linkis.common.ServiceInstance
import com.webank.wedatasphere.linkis.common.exception.LinkisRetryException
import com.webank.wedatasphere.linkis.manager.common.entity.node.EngineNode
import com.webank.wedatasphere.linkis.manager.common.protocol.engine.EngineCreateRequest
import com.webank.wedatasphere.linkis.message.builder.ServiceMethodContext
-
trait EngineCreateService {
+ def getEngineNode(serviceInstance: ServiceInstance): EngineNode
+
@throws[LinkisRetryException]
def createEngine(engineCreateRequest: EngineCreateRequest, smc: ServiceMethodContext): EngineNode
+
}
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/heartbeat/AMHeartbeatService.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/heartbeat/AMHeartbeatService.scala
index fbb0579..cf09cbe 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/heartbeat/AMHeartbeatService.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/service/heartbeat/AMHeartbeatService.scala
@@ -21,6 +21,7 @@
import java.util.concurrent.TimeUnit
import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
+import com.webank.wedatasphere.linkis.manager.am.conf.AMConfiguration
import com.webank.wedatasphere.linkis.manager.am.service.HeartbeatService
import com.webank.wedatasphere.linkis.manager.common.entity.metrics.AMNodeMetrics
import com.webank.wedatasphere.linkis.manager.common.monitor.ManagerMonitor
@@ -33,7 +34,6 @@
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
-
@Service
class AMHeartbeatService extends HeartbeatService with Logging {
@@ -53,7 +53,7 @@
@PostConstruct
def init(): Unit = {
- if (null != managerMonitor) {
+ if (null != managerMonitor && AMConfiguration.MONITOR_SWITCH_ON.getValue) {
info("start init AMHeartbeatService monitor")
Utils.defaultScheduler.scheduleAtFixedRate(managerMonitor, 1000, RMConfiguration.RM_ENGINE_SCAN_INTERVAL.getValue.toLong, TimeUnit.MILLISECONDS)
diff --git a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/utils/AMUtils.scala b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/utils/AMUtils.scala
index 994cbf8..dd12d0b 100644
--- a/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/utils/AMUtils.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-application-manager/src/main/scala/com/webank/wedatasphere/linkis/manager/am/utils/AMUtils.scala
@@ -22,9 +22,11 @@
import com.google.gson.{Gson, JsonObject}
import com.webank.wedatasphere.linkis.manager.am.vo.{AMEngineNodeVo, EMNodeVo}
+import com.webank.wedatasphere.linkis.manager.common.entity.enumeration.NodeStatus
import com.webank.wedatasphere.linkis.manager.common.entity.node.{EMNode, EngineNode}
-import com.webank.wedatasphere.linkis.manager.common.entity.resource.{DriverAndYarnResource, LoadInstanceResource, Resource, ResourceSerializer}
+import com.webank.wedatasphere.linkis.manager.common.entity.resource.{DriverAndYarnResource, LoadInstanceResource, Resource, ResourceSerializer, ResourceType}
import com.webank.wedatasphere.linkis.manager.common.serializer.NodeResourceSerializer
+import com.webank.wedatasphere.linkis.manager.common.utils.ResourceUtils
import com.webank.wedatasphere.linkis.manager.label.entity.engine.EngineTypeLabel
import com.webank.wedatasphere.linkis.server.BDPJettyServerHelper
import org.json4s.DefaultFormats
@@ -52,6 +54,7 @@
if(node.getNodeResource.getMaxResource != null) EMNodeVo.setMaxResource(mapper.readValue(write(node.getNodeResource.getMaxResource), classOf[util.Map[String, Any]]))
if(node.getNodeResource.getMinResource != null) EMNodeVo.setMinResource(mapper.readValue(write(node.getNodeResource.getMinResource), classOf[util.Map[String, Any]]))
if(node.getNodeResource.getUsedResource != null) EMNodeVo.setUsedResource(mapper.readValue(write(node.getNodeResource.getUsedResource), classOf[util.Map[String, Any]]))
+ else EMNodeVo.setUsedResource(mapper.readValue(write(Resource.initResource(ResourceType.Default)), classOf[util.Map[String, Any]]))
if(node.getNodeResource.getLockedResource != null) EMNodeVo.setLockedResource(mapper.readValue(write(node.getNodeResource.getLockedResource), classOf[util.Map[String, Any]]))
if(node.getNodeResource.getExpectedResource != null) EMNodeVo.setExpectedResource(mapper.readValue(write(node.getNodeResource.getExpectedResource), classOf[util.Map[String, Any]]))
if(node.getNodeResource.getLeftResource != null) EMNodeVo.setLeftResource(mapper.readValue(write(node.getNodeResource.getLeftResource), classOf[util.Map[String, Any]]))
@@ -94,7 +97,11 @@
}
}
if(node.getStartTime != null) AMEngineNodeVo.setStartTime(node.getStartTime)
- if(node.getNodeStatus != null) AMEngineNodeVo.setNodeStatus(node.getNodeStatus)
+ if(node.getNodeStatus != null) {
+ AMEngineNodeVo.setNodeStatus(node.getNodeStatus)
+ }else{
+ AMEngineNodeVo.setNodeStatus(NodeStatus.Starting)
+ }
if(node.getLock != null) AMEngineNodeVo.setLock(node.getLock)
if(node.getNodeResource != null){
if(node.getNodeResource.getResourceType != null) AMEngineNodeVo.setResourceType(node.getNodeResource.getResourceType)
@@ -106,6 +113,8 @@
case _ => node.getNodeResource.getUsedResource
}
AMEngineNodeVo.setUsedResource(mapper.readValue(write(realResource), classOf[util.Map[String, Any]]))
+ }else{
+ AMEngineNodeVo.setUsedResource(mapper.readValue(write(Resource.initResource(ResourceType.Default)), classOf[util.Map[String, Any]]))
}
if(node.getNodeResource.getLockedResource != null) AMEngineNodeVo.setLockedResource(mapper.readValue(write(node.getNodeResource.getLockedResource), classOf[util.Map[String, Any]]))
if(node.getNodeResource.getExpectedResource != null) AMEngineNodeVo.setExpectedResource(mapper.readValue(write(node.getNodeResource.getExpectedResource), classOf[util.Map[String, Any]]))
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-client/pom.xml b/linkis-computation-governance/linkis-manager/linkis-manager-client/pom.xml
index 3d15698..463d34f 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-client/pom.xml
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-client/pom.xml
@@ -17,7 +17,7 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<groupId>com.webank.wedatasphere.linkis</groupId>
<artifactId>linkis</artifactId>
</parent>
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/pom.xml b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/pom.xml
index e138218..ae5199a 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/pom.xml
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<!-- <relativePath>../../pom.xml</relativePath>-->
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/constant/AMConstant.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/constant/AMConstant.java
index a12f927..c0a0535 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/constant/AMConstant.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/constant/AMConstant.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.constant;
@@ -17,4 +35,6 @@
public static final String START_REASON = "start_reason";
+ public static final String EC_CAN_RETRY = "ec_can_try";
+
}
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/enumeration/NodeHealthy.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/enumeration/NodeHealthy.java
index 4d946d5..2c84b7c 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/enumeration/NodeHealthy.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/enumeration/NodeHealthy.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.enumeration;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/enumeration/NodeStatus.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/enumeration/NodeStatus.java
index ea6d3aa..a08930f 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/enumeration/NodeStatus.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/enumeration/NodeStatus.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.enumeration;
@@ -10,10 +28,9 @@
* <p>
* manager中管理的engineConn状态:Starting ShuttingDown Unlock Idle Busy
*/
- Starting, ShuttingDown, Failed, Success,
- Idle, Busy,
- Locked, Unlock,
- Running;
+ Starting, Unlock, Locked,
+ Idle, Busy, Running, ShuttingDown, Failed, Success;
+
public static Boolean isAvailable(NodeStatus status) {
if (Idle == status || Busy == status || Locked == status || Unlock == status || Running == status) {
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/label/LabelKeyValue.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/label/LabelKeyValue.java
index ca7d582..92225a0 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/label/LabelKeyValue.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/label/LabelKeyValue.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.label;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/metrics/AMNodeMetrics.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/metrics/AMNodeMetrics.java
index efd5f30..608095b 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/metrics/AMNodeMetrics.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/metrics/AMNodeMetrics.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.metrics;
import com.webank.wedatasphere.linkis.common.ServiceInstance;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/metrics/NodeHealthyInfo.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/metrics/NodeHealthyInfo.java
index 364a120..985c446 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/metrics/NodeHealthyInfo.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/metrics/NodeHealthyInfo.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.metrics;
import com.webank.wedatasphere.linkis.manager.common.entity.enumeration.NodeHealthy;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/metrics/NodeMetrics.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/metrics/NodeMetrics.java
index 7080060..be2ba0e 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/metrics/NodeMetrics.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/metrics/NodeMetrics.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.metrics;
import com.webank.wedatasphere.linkis.common.ServiceInstance;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/metrics/NodeOverLoadInfo.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/metrics/NodeOverLoadInfo.java
index 953f865..58d7b43 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/metrics/NodeOverLoadInfo.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/metrics/NodeOverLoadInfo.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.metrics;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/metrics/NodeTaskInfo.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/metrics/NodeTaskInfo.java
index 5e225cd..bf6c5c4 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/metrics/NodeTaskInfo.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/metrics/NodeTaskInfo.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.metrics;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/AMEMNode.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/AMEMNode.java
index b8e4c7c..addd37b 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/AMEMNode.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/AMEMNode.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.node;
import com.webank.wedatasphere.linkis.common.ServiceInstance;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/AMEngineNode.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/AMEngineNode.java
index 7bfc62b..3520650 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/AMEngineNode.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/AMEngineNode.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.node;
import com.webank.wedatasphere.linkis.common.ServiceInstance;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/AMNode.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/AMNode.java
index 78d7abc..1642bc1 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/AMNode.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/AMNode.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.node;
import com.webank.wedatasphere.linkis.manager.common.entity.metrics.NodeHealthyInfo;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/DefaultScoreServiceInstance.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/DefaultScoreServiceInstance.java
index 8b367dd..bab1f03 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/DefaultScoreServiceInstance.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/DefaultScoreServiceInstance.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.node;
import com.webank.wedatasphere.linkis.common.ServiceInstance;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/EMNode.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/EMNode.java
index f3c5979..27e27d5 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/EMNode.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/EMNode.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.node;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/EngineNode.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/EngineNode.java
index cc803a4..9e8ee5a 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/EngineNode.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/EngineNode.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.node;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/InfoRMNode.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/InfoRMNode.java
index 45e0f77..fb7cb90 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/InfoRMNode.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/InfoRMNode.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.node;
import com.webank.wedatasphere.linkis.common.ServiceInstance;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/LabelNode.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/LabelNode.java
index f78d139..9a00928 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/LabelNode.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/LabelNode.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.node;
import com.webank.wedatasphere.linkis.manager.label.entity.Label;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/Node.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/Node.java
index d5b7dff..d40958c 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/Node.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/Node.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.node;
import com.webank.wedatasphere.linkis.common.ServiceInstance;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/RMNode.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/RMNode.java
index 8f5b339..25c32c0 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/RMNode.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/RMNode.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.node;
import com.webank.wedatasphere.linkis.manager.common.entity.resource.NodeResource;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/ScoreServiceInstance.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/ScoreServiceInstance.java
index 6551b7b..a9e91eb 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/ScoreServiceInstance.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/node/ScoreServiceInstance.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.node;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceLabel.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceLabel.java
index f4afb2a..9a1b640 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceLabel.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceLabel.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.persistence;
import com.webank.wedatasphere.linkis.manager.label.entity.GenericLabel;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceLock.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceLock.java
index c754241..b6773f8 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceLock.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceLock.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.persistence;
import java.util.Date;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceNode.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceNode.java
index 99121d9..e0a8a6b 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceNode.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceNode.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.persistence;
import java.util.Date;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceNodeEntity.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceNodeEntity.java
index 7d43763..9358fc1 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceNodeEntity.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceNodeEntity.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.persistence;
import com.webank.wedatasphere.linkis.common.ServiceInstance;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceNodeMetrics.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceNodeMetrics.java
index a86057a..b4f54d7 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceNodeMetrics.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceNodeMetrics.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.persistence;
import com.webank.wedatasphere.linkis.common.ServiceInstance;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceNodeMetricsEntity.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceNodeMetricsEntity.java
index 234060a..e0ef316 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceNodeMetricsEntity.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceNodeMetricsEntity.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.persistence;
import java.util.Date;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceResource.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceResource.java
index b856ff2..168bfec 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceResource.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/persistence/PersistenceResource.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.persistence;
import java.util.Date;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/resource/CommonNodeResource.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/resource/CommonNodeResource.java
index 256b197..c55d693 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/resource/CommonNodeResource.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/resource/CommonNodeResource.java
@@ -1,5 +1,25 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.resource;
+import java.util.Date;
+
public class CommonNodeResource implements NodeResource {
private ResourceType resourceType;
@@ -16,6 +36,10 @@
private Resource leftResource;
+ private Date createTime;
+
+ private Date updateTime;
+
public static NodeResource initNodeResource(ResourceType resourceType){
CommonNodeResource commonNodeResource = new CommonNodeResource();
commonNodeResource.setResourceType(resourceType);
@@ -33,6 +57,27 @@
public ResourceType getResourceType() {
return resourceType;
}
+
+ @Override
+ public void setCreateTime(Date createTime) {
+ this.createTime = createTime;
+ }
+
+ @Override
+ public Date getCreateTime() {
+ return createTime;
+ }
+
+ @Override
+ public Date getUpdateTime() {
+ return updateTime;
+ }
+
+ @Override
+ public void setUpdateTime(Date updateTime) {
+ this.updateTime = updateTime;
+ }
+
@Override
public void setResourceType(ResourceType resourceType) {
this.resourceType = resourceType;
@@ -78,6 +123,8 @@
this.lockedResource = lockedResource;
}
+
+
@Override
public Resource getExpectedResource() {
return expectedResource;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/resource/NodeResource.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/resource/NodeResource.java
index 6ad6e16..47c0d38 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/resource/NodeResource.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/resource/NodeResource.java
@@ -1,14 +1,40 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.resource;
import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol;
import java.io.Serializable;
-
+import java.util.Date;
public interface NodeResource extends Serializable, RequestProtocol {
ResourceType getResourceType();
+ void setCreateTime(Date createTime);
+
+ Date getCreateTime();
+
+ Date getUpdateTime();
+
+ void setUpdateTime(Date updateTime);
+
void setResourceType(ResourceType resourceType);
void setMinResource(Resource minResource);
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/resource/ResourceType.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/resource/ResourceType.java
index b4f953b..ad08ec1 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/resource/ResourceType.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/entity/resource/ResourceType.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.resource;
public enum ResourceType {
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/exception/ResourceWarnException.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/exception/ResourceWarnException.java
index db2be31..7e1e852 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/exception/ResourceWarnException.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/exception/ResourceWarnException.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.exception;
import com.webank.wedatasphere.linkis.common.exception.WarnException;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/bml/BmlResource.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/bml/BmlResource.java
index 8f698d7..09f6cee 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/bml/BmlResource.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/bml/BmlResource.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.bml;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/bml/LocalResource.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/bml/LocalResource.scala
index ffadb26..77dfc1b 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/bml/LocalResource.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/bml/LocalResource.scala
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.bml
import java.util.Date
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/conf/RemoveCacheConfRequest.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/conf/RemoveCacheConfRequest.java
new file mode 100644
index 0000000..c4b06dd
--- /dev/null
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/conf/RemoveCacheConfRequest.java
@@ -0,0 +1,46 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package com.webank.wedatasphere.linkis.manager.common.protocol.conf;
+
+import com.webank.wedatasphere.linkis.manager.label.entity.engine.EngineTypeLabel;
+import com.webank.wedatasphere.linkis.manager.label.entity.engine.UserCreatorLabel;
+import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol;
+
+public class RemoveCacheConfRequest implements RequestProtocol {
+
+ private UserCreatorLabel userCreatorLabel;
+
+ private EngineTypeLabel engineTypeLabel;
+
+ public UserCreatorLabel getUserCreatorLabel() {
+ return userCreatorLabel;
+ }
+
+ public void setUserCreatorLabel(UserCreatorLabel userCreatorLabel) {
+ this.userCreatorLabel = userCreatorLabel;
+ }
+
+ public EngineTypeLabel getEngineTypeLabel() {
+ return engineTypeLabel;
+ }
+
+ public void setEngineTypeLabel(EngineTypeLabel engineTypeLabel) {
+ this.engineTypeLabel = engineTypeLabel;
+ }
+}
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/em/EMInfoClearRequest.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/em/EMInfoClearRequest.java
index e09fca3..ee1b698 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/em/EMInfoClearRequest.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/em/EMInfoClearRequest.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.em;
import com.webank.wedatasphere.linkis.manager.common.entity.node.EMNode;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/em/EMRequest.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/em/EMRequest.java
index 0af7d86..77f06a8 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/em/EMRequest.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/em/EMRequest.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.em;
import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/em/GetEMEnginesRequest.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/em/GetEMEnginesRequest.java
index ae1a07c..4a29356 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/em/GetEMEnginesRequest.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/em/GetEMEnginesRequest.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.em;
import com.webank.wedatasphere.linkis.common.ServiceInstance;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/em/GetEMInfoRequest.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/em/GetEMInfoRequest.java
index 7330b2c..777b288 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/em/GetEMInfoRequest.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/em/GetEMInfoRequest.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.em;
import com.webank.wedatasphere.linkis.common.ServiceInstance;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/em/PauseEMRequest.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/em/PauseEMRequest.java
index 4de9458..833b0f0 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/em/PauseEMRequest.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/em/PauseEMRequest.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.em;
import com.webank.wedatasphere.linkis.common.ServiceInstance;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/em/StopEMRequest.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/em/StopEMRequest.java
index e7babdb..d7a9635 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/em/StopEMRequest.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/em/StopEMRequest.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.em;
import com.webank.wedatasphere.linkis.common.ServiceInstance;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineAskRequest.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineAskRequest.java
index 8aa222a..ab3ccdd 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineAskRequest.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineAskRequest.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.engine;
import com.webank.wedatasphere.linkis.protocol.message.RequestMethod;
@@ -15,6 +33,9 @@
private String user;
+ /**
+ * Used to identify the source of the request
+ */
private String createService;
private String description;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineConnReleaseRequest.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineConnReleaseRequest.java
index cd88b5f..c5a46a5 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineConnReleaseRequest.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineConnReleaseRequest.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.engine;
import com.webank.wedatasphere.linkis.common.ServiceInstance;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineCreateRequest.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineCreateRequest.java
index dceb9b6..0f1beff 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineCreateRequest.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineCreateRequest.java
@@ -1,10 +1,27 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.engine;
import com.webank.wedatasphere.linkis.protocol.message.RequestMethod;
import java.util.Map;
-
public class EngineCreateRequest implements EngineRequest, RequestMethod {
private Map<String, String> properties;
@@ -19,6 +36,8 @@
private String description;
+ private boolean ignoreTimeout = false;
+
public Map<String, String> getProperties() {
return properties;
}
@@ -68,6 +87,14 @@
this.description = description;
}
+ public boolean isIgnoreTimeout() {
+ return ignoreTimeout;
+ }
+
+ public void setIgnoreTimeout(boolean ignoreTimeout) {
+ this.ignoreTimeout = ignoreTimeout;
+ }
+
@Override
public String method() {
return "/engine/create";
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineInfoClearRequest.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineInfoClearRequest.java
index 159f8af..741d533 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineInfoClearRequest.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineInfoClearRequest.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.engine;
import com.webank.wedatasphere.linkis.manager.common.entity.node.EngineNode;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineLockType.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineLockType.java
index cef1352..b7a0028 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineLockType.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineLockType.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.engine;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineRecyclingRequest.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineRecyclingRequest.java
index ce2ca1e..abae0c3 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineRecyclingRequest.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineRecyclingRequest.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.engine;
import com.webank.wedatasphere.linkis.manager.common.entity.recycle.RecyclingRule;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineRequest.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineRequest.java
index c79d924..7a6351a 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineRequest.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineRequest.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.engine;
import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineReuseRequest.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineReuseRequest.java
index 96ccecc..078baf9 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineReuseRequest.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineReuseRequest.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.engine;
import java.util.Map;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineStopRequest.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineStopRequest.java
index 9f5d19a..7010c4d 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineStopRequest.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineStopRequest.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.engine;
import com.webank.wedatasphere.linkis.common.ServiceInstance;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineStopResponse.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineStopResponse.java
index 46cced0..e0bd131 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineStopResponse.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineStopResponse.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.engine;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineSuicideRequest.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineSuicideRequest.java
index 61e7b09..d8c0cd1 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineSuicideRequest.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineSuicideRequest.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.engine;
import com.webank.wedatasphere.linkis.common.ServiceInstance;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineSwitchRequest.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineSwitchRequest.java
index 58d0fa1..cae4d39 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineSwitchRequest.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineSwitchRequest.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.engine;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/HeartbeatProtocol.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/HeartbeatProtocol.java
index f93b36d..afd7309 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/HeartbeatProtocol.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/HeartbeatProtocol.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.node;
import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/NodeHeartbeatMsg.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/NodeHeartbeatMsg.java
index b2cc66c..39e2f65 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/NodeHeartbeatMsg.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/NodeHeartbeatMsg.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.node;
import com.webank.wedatasphere.linkis.common.ServiceInstance;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/NodeHeartbeatRequest.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/NodeHeartbeatRequest.java
index 29c92eb..2181223 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/NodeHeartbeatRequest.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/NodeHeartbeatRequest.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.node;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/NodeRequestProtocol.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/NodeRequestProtocol.java
index bb474f8..c8e76b7 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/NodeRequestProtocol.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/NodeRequestProtocol.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.node;
import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/NodeStatusProtocol.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/NodeStatusProtocol.java
index a21ab16..facdfde 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/NodeStatusProtocol.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/NodeStatusProtocol.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.node;
import com.webank.wedatasphere.linkis.protocol.message.RequestProtocol;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/RequestNodeStatus.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/RequestNodeStatus.java
index 3aae0f0..e38b19f 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/RequestNodeStatus.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/RequestNodeStatus.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.node;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/ResponseNodeStatus.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/ResponseNodeStatus.java
index a3ae82f..373faea 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/ResponseNodeStatus.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/ResponseNodeStatus.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.node;
import com.webank.wedatasphere.linkis.manager.common.entity.enumeration.NodeStatus;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/StopNodeRequest.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/StopNodeRequest.java
index 9230a4d..bb77099 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/StopNodeRequest.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/protocol/node/StopNodeRequest.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.node;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/utils/ManagerUtils.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/utils/ManagerUtils.java
index 74b03ba..8d43fc9 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/utils/ManagerUtils.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/java/com/webank/wedatasphere/linkis/manager/common/utils/ManagerUtils.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.utils;
import com.webank.wedatasphere.linkis.manager.common.conf.ManagerCommonConf;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/conf/ManagerCommonConf.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/conf/ManagerCommonConf.scala
index 6a907ff..40db189 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/conf/ManagerCommonConf.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/conf/ManagerCommonConf.scala
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.conf
import com.webank.wedatasphere.linkis.common.conf.CommonVars
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/entity/recycle/RecyclingRule.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/entity/recycle/RecyclingRule.scala
index 137047a..c8690b6 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/entity/recycle/RecyclingRule.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/entity/recycle/RecyclingRule.scala
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.entity.recycle
import com.webank.wedatasphere.linkis.common.ServiceInstance
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/entity/resource/Resource.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/entity/resource/Resource.scala
index 329f595..0d75547 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/entity/resource/Resource.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/entity/resource/Resource.scala
@@ -363,7 +363,7 @@
if (this.yarnResource != null && r.yarnResource != null &&
StringUtils.isNotEmpty(this.yarnResource.queueName) &&
StringUtils.isNotEmpty(r.yarnResource.queueName) && this.yarnResource.queueName.equals(r.yarnResource.queueName)) {
- info(s"Not module operate this:$this other:$r")
+ debug(s"Not module operate this:$this other:$r")
false
}
else
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/monitor/ManagerMonitor.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/monitor/ManagerMonitor.scala
index 794b030..1fd0ccc 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/monitor/ManagerMonitor.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/monitor/ManagerMonitor.scala
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.monitor
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/EngineLock.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/EngineLock.scala
index bf658b2..697982d 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/EngineLock.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/EngineLock.scala
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol
import com.webank.wedatasphere.linkis.common.ServiceInstance
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/ServiceHealthReport.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/ServiceHealthReport.scala
index 3ab3db3..3f5d518 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/ServiceHealthReport.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/ServiceHealthReport.scala
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/ServiceState.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/ServiceState.scala
index 810a662..a1437a0 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/ServiceState.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/ServiceState.scala
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/em/RegisterEMRequest.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/em/RegisterEMRequest.scala
index 4515338..4e53b9d 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/em/RegisterEMRequest.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/em/RegisterEMRequest.scala
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.em
import java.util
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineAsyncResponse.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineAsyncResponse.scala
index e8f92c6..e42891e 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineAsyncResponse.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineAsyncResponse.scala
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.engine
import com.webank.wedatasphere.linkis.common.ServiceInstance
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineConnStatusCallback.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineConnStatusCallback.scala
index 6ba6c4d..56a2b2a 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineConnStatusCallback.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/engine/EngineConnStatusCallback.scala
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.engine
import com.webank.wedatasphere.linkis.common.ServiceInstance
@@ -11,4 +29,6 @@
/**
* engineConnManager send to Manager
*/
-case class EngineConnStatusCallbackToAM(serviceInstance: ServiceInstance, status: NodeStatus, initErrorMsg: String) extends RequestProtocol with RetryableProtocol
+case class EngineConnStatusCallbackToAM(serviceInstance: ServiceInstance, status: NodeStatus, initErrorMsg: String, canRetry: Boolean = false) extends RequestProtocol with RetryableProtocol
+
+
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/label/LabelUpdateRequest.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/label/LabelUpdateRequest.scala
index f7cb079..289480a 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/label/LabelUpdateRequest.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/label/LabelUpdateRequest.scala
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.label
import com.webank.wedatasphere.linkis.common.ServiceInstance
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/resource/ResourceProtocol.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/resource/ResourceProtocol.scala
index 3818f40..053ee7a 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/resource/ResourceProtocol.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/protocol/resource/ResourceProtocol.scala
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.protocol.resource
import com.webank.wedatasphere.linkis.common.ServiceInstance
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/serializer/NodeResourceSerializer.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/serializer/NodeResourceSerializer.scala
index 6fd05b1..ad675a2 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/serializer/NodeResourceSerializer.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/serializer/NodeResourceSerializer.scala
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.serializer
import com.webank.wedatasphere.linkis.manager.common.entity.resource._
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/serializer/RegisterEMRequestSerializer.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/serializer/RegisterEMRequestSerializer.scala
index 911a9b8..c432bca 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/serializer/RegisterEMRequestSerializer.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/serializer/RegisterEMRequestSerializer.scala
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.serializer
import com.webank.wedatasphere.linkis.common.ServiceInstance
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/utils/ResourceUtils.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/utils/ResourceUtils.scala
index 017779c..abbf58b 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/utils/ResourceUtils.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-common/src/main/scala/com/webank/wedatasphere/linkis/manager/common/utils/ResourceUtils.scala
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.common.utils
import com.webank.wedatasphere.linkis.manager.common.entity.persistence.PersistenceResource
@@ -40,6 +58,8 @@
if (persistenceResource.getExpectedResource != null) nodeResource.setExpectedResource(deserializeResource(persistenceResource.getExpectedResource))
if (persistenceResource.getLeftResource != null) nodeResource.setLeftResource(deserializeResource(persistenceResource.getLeftResource))
if (persistenceResource.getUsedResource != null) nodeResource.setUsedResource(deserializeResource(persistenceResource.getUsedResource))
+ if (persistenceResource.getCreateTime != null) nodeResource.setCreateTime(persistenceResource.getCreateTime)
+ if (persistenceResource.getUpdateTime != null) nodeResource.setUpdateTime(persistenceResource.getUpdateTime)
nodeResource.setResourceType(ResourceType.valueOf(persistenceResource.getResourceType))
nodeResource
}
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/pom.xml b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/pom.xml
index 52f6815..11b6088 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/pom.xml
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<!-- <relativePath>../../pom.xml</relativePath>-->
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/label/LabelChecker.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/label/LabelChecker.scala
index e912ecb..39b1ac4 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/label/LabelChecker.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/label/LabelChecker.scala
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.service.common.label
import java.util
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/label/LabelFilter.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/label/LabelFilter.scala
index 95fc325..cce34ce 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/label/LabelFilter.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/label/LabelFilter.scala
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.service.common.label
import java.util
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/label/ManagerLabelService.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/label/ManagerLabelService.scala
index 5ebf53c..f7ede08 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/label/ManagerLabelService.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/label/ManagerLabelService.scala
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.service.common.label
import java.util
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/metrics/MetricsConverter.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/metrics/MetricsConverter.scala
index 18fba10..5b80244 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/metrics/MetricsConverter.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/metrics/MetricsConverter.scala
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.service.common.metrics
import com.webank.wedatasphere.linkis.common.ServiceInstance
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/pointer/EMNodPointer.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/pointer/EMNodPointer.scala
index a669187..35536d6 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/pointer/EMNodPointer.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/pointer/EMNodPointer.scala
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.service.common.pointer
import com.webank.wedatasphere.linkis.manager.common.entity.node.EngineNode
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/pointer/EngineNodePointer.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/pointer/EngineNodePointer.scala
index d5b28cf..ca6db27 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/pointer/EngineNodePointer.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/pointer/EngineNodePointer.scala
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.service.common.pointer
import com.webank.wedatasphere.linkis.manager.common.protocol.{RequestEngineLock, RequestEngineUnlock}
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/pointer/NodePointer.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/pointer/NodePointer.scala
index 66cc82e..ac1c611 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/pointer/NodePointer.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/pointer/NodePointer.scala
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.service.common.pointer
import com.webank.wedatasphere.linkis.manager.common.entity.enumeration.NodeStatus
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/pointer/NodePointerBuilder.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/pointer/NodePointerBuilder.scala
index e412060..6d2dc3c 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/pointer/NodePointerBuilder.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-manager-service-common/src/main/scala/com/webank/wedatasphere/linkis/manager/service/common/pointer/NodePointerBuilder.scala
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.manager.service.common.pointer
import com.webank.wedatasphere.linkis.manager.common.entity.node.{EMNode, EngineNode}
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-resource-manager-common/pom.xml b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-resource-manager-common/pom.xml
index c61f766..31e1259 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-resource-manager-common/pom.xml
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-resource-manager-common/pom.xml
@@ -18,7 +18,7 @@
<modelVersion>4.0.0</modelVersion>
<parent>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<groupId>com.webank.wedatasphere.linkis</groupId>
<artifactId>linkis</artifactId>
</parent>
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-resource-manager-common/src/main/java/com/webank/wedatasphere/linkis/resourcemanager/protocol/TimeoutEMEngineRequest.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-resource-manager-common/src/main/java/com/webank/wedatasphere/linkis/resourcemanager/protocol/TimeoutEMEngineRequest.java
index 75d1d9a..44da238 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-resource-manager-common/src/main/java/com/webank/wedatasphere/linkis/resourcemanager/protocol/TimeoutEMEngineRequest.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-resource-manager-common/src/main/java/com/webank/wedatasphere/linkis/resourcemanager/protocol/TimeoutEMEngineRequest.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.resourcemanager.protocol;
import com.webank.wedatasphere.linkis.manager.common.protocol.em.EMRequest;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-resource-manager-common/src/main/java/com/webank/wedatasphere/linkis/resourcemanager/protocol/TimeoutEMEngineResponse.java b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-resource-manager-common/src/main/java/com/webank/wedatasphere/linkis/resourcemanager/protocol/TimeoutEMEngineResponse.java
index 1b790dd..fe4d48e 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-resource-manager-common/src/main/java/com/webank/wedatasphere/linkis/resourcemanager/protocol/TimeoutEMEngineResponse.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-resource-manager-common/src/main/java/com/webank/wedatasphere/linkis/resourcemanager/protocol/TimeoutEMEngineResponse.java
@@ -1,3 +1,21 @@
+/*
+ *
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
package com.webank.wedatasphere.linkis.resourcemanager.protocol;
public class TimeoutEMEngineResponse {
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-resource-manager-common/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/utils/RMConfiguration.scala b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-resource-manager-common/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/utils/RMConfiguration.scala
index cc466af..9ea3bab 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-resource-manager-common/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/utils/RMConfiguration.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-commons/linkis-resource-manager-common/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/utils/RMConfiguration.scala
@@ -30,6 +30,8 @@
val NODE_HEARTBEAT_INTERVAL = CommonVars("wds.linkis.manager.am.node.heartbeat", new TimeType("3m"))
val NODE_HEARTBEAT_MAX_UPDATE_TIME = CommonVars("wds.linkis.manager.am.node.heartbeat", new TimeType("5m"))
+ val LOCK_RELEASE_TIMEOUT = CommonVars("wds.linkis.manager.rm.lock.release.timeout", new TimeType("5m"))
+ val LOCK_RELEASE_CHECK_INTERVAL = CommonVars("wds.linkis.manager.rm.lock.release.check.interval", new TimeType("5m"))
//Resource parameter(资源参数)
val USER_AVAILABLE_CPU = CommonVars("wds.linkis.rm.client.core.max", 10)
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-monitor/pom.xml b/linkis-computation-governance/linkis-manager/linkis-manager-monitor/pom.xml
index 204ecd5..df57ecd 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-monitor/pom.xml
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-monitor/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<!--<relativePath>../pom.xml</relativePath>-->
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-monitor/src/main/scala/com/webank/wedatasphere/linkis/manager/monitor/node/NodeHeartbeatMonitor.scala b/linkis-computation-governance/linkis-manager/linkis-manager-monitor/src/main/scala/com/webank/wedatasphere/linkis/manager/monitor/node/NodeHeartbeatMonitor.scala
index 185fcd9..490df44 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-monitor/src/main/scala/com/webank/wedatasphere/linkis/manager/monitor/node/NodeHeartbeatMonitor.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-monitor/src/main/scala/com/webank/wedatasphere/linkis/manager/monitor/node/NodeHeartbeatMonitor.scala
@@ -1,23 +1,7 @@
-/*
- * Copyright 2019 WeBank
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
package com.webank.wedatasphere.linkis.manager.monitor.node
import java.util
-import java.util.concurrent.{ExecutorService, TimeUnit}
+import java.util.concurrent.{ExecutorService, TimeUnit, TimeoutException}
import com.webank.wedatasphere.linkis.common.ServiceInstance
import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
@@ -27,7 +11,7 @@
import com.webank.wedatasphere.linkis.manager.common.entity.persistence.PersistenceNodeEntity
import com.webank.wedatasphere.linkis.manager.common.monitor.ManagerMonitor
import com.webank.wedatasphere.linkis.manager.common.protocol.em.StopEMRequest
-import com.webank.wedatasphere.linkis.manager.common.protocol.engine.{EngineStopRequest, EngineSuicideRequest}
+import com.webank.wedatasphere.linkis.manager.common.protocol.engine.{EngineInfoClearRequest, EngineStopRequest, EngineSuicideRequest}
import com.webank.wedatasphere.linkis.manager.common.protocol.node.{NodeHeartbeatMsg, NodeHeartbeatRequest}
import com.webank.wedatasphere.linkis.manager.common.utils.ManagerUtils
import com.webank.wedatasphere.linkis.manager.monitor.conf.ManagerMonitorConf
@@ -64,12 +48,12 @@
/**
- * 1. 定时三分钟扫所有的node,判断Metrics的更新时间,
- * 2. 如果更新时间超过5分钟未更新,主动发起Metrics更新请求。如果node 状态已经是Unhealthy 则直接发起kill请求
- * 3. 如果send报节点不存在,需要移除该node,判断该Node为Engine还是EM信息
- * 4. 如果send报其他异常,就标记为unhealthy
- * 5. 如果正常则更新Metrics
- */
+ * 1. 定时三分钟扫所有的node,判断Metrics的更新时间,
+ * 2. 如果更新时间超过5分钟未更新,主动发起Metrics更新请求。如果node 状态已经是Unhealthy 则直接发起kill请求
+ * 3. 如果send报节点不存在,需要移除该node,判断该Node为Engine还是EM信息
+ * 4. 如果send报其他异常,就标记为unhealthy
+ * 5. 如果正常则更新Metrics
+ */
override def run(): Unit = Utils.tryAndWarn {
info("Start to check the health of the node")
// 1. 获取nodes
@@ -86,7 +70,7 @@
(System.currentTimeMillis() - createTime) > maxInterval
case _ => true
}
- if (!existingEngineInstances.contains(engineNode.getServiceInstance) && engineIsStarted) {
+ if (!existingEngineInstances.contains(engineNode.getServiceInstance) && engineIsStarted && engineNode.getNodeStatus != null) {
warn(s"Failed to find instance ${engineNode.getServiceInstance} from eureka prepare to kill")
clearEngineNode(engineNode.getServiceInstance)
}
@@ -117,19 +101,19 @@
}
/**
- * 当EM状态为Healthy和WARN时:
- * 1. 判断Metrics的更新时间,如果超过一定时间没上报,主动发起Metrics更新请求
- * 2. 如果send异常,标识为UnHealthy
- * 3. 如果send的结果不可用则更新为对应状态
- * 4. 如果正常则更新Metrics
- * 当Engine状态为Healthy和WARN时:
- * 1. 判断Metrics的更新时间,如果超过一定时间没上报,主动发起Metrics更新请求
- * 2. 如果send异常,标识为UnHealthy
- * 3. 如果send的结果不可用则更新为UnHealthy状态
- * 4. 如果正常则更新Metrics
- *
- * @param healthyList
- */
+ * 当EM状态为Healthy和WARN时:
+ * 1. 判断Metrics的更新时间,如果超过一定时间没上报,主动发起Metrics更新请求
+ * 2. 如果send异常,标识为UnHealthy
+ * 3. 如果send的结果不可用则更新为对应状态
+ * 4. 如果正常则更新Metrics
+ * 当Engine状态为Healthy和WARN时:
+ * 1. 判断Metrics的更新时间,如果超过一定时间没上报,主动发起Metrics更新请求
+ * 2. 如果send异常,标识为UnHealthy
+ * 3. 如果send的结果不可用则更新为UnHealthy状态
+ * 4. 如果正常则更新Metrics
+ *
+ * @param healthyList
+ */
private def dealHealthyList(healthyList: util.List[NodeMetrics]): Unit = Utils.tryAndWarn {
if (null != healthyList) {
healthyList.foreach { nodeMetric =>
@@ -138,39 +122,46 @@
sender = Sender.getSender(nodeMetric.getServiceInstance)
} catch {
case n: NoInstanceExistsException =>
- updateMetricHealthy(nodeMetric, NodeHealthy.UnHealthy, "The corresponding service could not be found(找不到对应的服务:NoInstanceExistsException)")
+ updateMetricHealthy(nodeMetric, NodeHealthy.UnHealthy, "找不到对应的服务:NoInstanceExistsException")
}
if (sender == null) {
- updateMetricHealthy(nodeMetric, NodeHealthy.UnHealthy, "The corresponding service could not be found ,the sender is null(找不到对应的服务,获取的sender为空)")
+ updateMetricHealthy(nodeMetric, NodeHealthy.UnHealthy, "找不到对应的服务,获取的sender为空")
}
-
- sender.ask(new NodeHeartbeatRequest) match {
+ Utils.tryCatch(sender.ask(new NodeHeartbeatRequest) match {
case m: NodeHeartbeatMsg =>
if (!NodeHealthy.isAvailable(m.getHealthyInfo.getNodeHealthy) && managerLabelService.isEngine(nodeMetric.getServiceInstance)) {
- updateMetricHealthy(nodeMetric, NodeHealthy.UnHealthy, "the engine is in down state mark the enginge Unhealthy(该engine为不可用状态,标识该engine为Unhealthy)")
+ updateMetricHealthy(nodeMetric, NodeHealthy.UnHealthy, "该engine为不可用状态,标识该engine为Unhealthy")
} else {
messagePublisher.publish(m)
}
case _ =>
- updateMetricHealthy(nodeMetric, NodeHealthy.UnHealthy, "The corresponding service could not be found ,the sender is null(找不到对应的服务,获取的sender为空)")
+ updateMetricHealthy(nodeMetric, NodeHealthy.UnHealthy, "找不到对应的服务,获取的sender为空")
+ }){
+ case e: TimeoutException => {
+ warn(s"引擎发送RPC请求失败,找不到引擎实例:${nodeMetric.getServiceInstance},开始发送请求停止该引擎!")
+ clearEngineNode(nodeMetric.getServiceInstance)
+ }
+ case exception: Exception => {
+ warn(s"发送引擎心跳RPC请求失败,但不是由于超时引起的,不会强制停止该引擎,引擎实例:${nodeMetric.getServiceInstance}", exception)
+ }
}
}
}
}
/**
- * 当EM状态为UnHealthy时:
- * 1. manager主动要求所有的engine强制退出(engine自杀)
- * 2. 如果没有engine了,则强制退出EM
- * 3.TODO 如果EM强制退出失败,告警Major
- *
- * 当Engine为UnHealthy状态时:
- * 1. 由EM进行kill,如果失败执行2
- * 2. 由engine进行自杀,如果失败执行3
- * 3. 发送minor告警
- *
- * @param unhealthyList
- */
+ * 当EM状态为UnHealthy时:
+ * 1. manager主动要求所有的engine强制退出(engine自杀)
+ * 2. 如果没有engine了,则强制退出EM
+ * 3.TODO 如果EM强制退出失败,告警Major
+ *
+ * 当Engine为UnHealthy状态时:
+ * 1. 由EM进行kill,如果失败执行2
+ * 2. 由engine进行自杀,如果失败执行3
+ * 3. 发送minor告警
+ *
+ * @param unhealthyList
+ */
private def dealUnHealthyList(unhealthyList: util.List[NodeMetrics]): Unit = Utils.tryAndWarn {
if (null == unhealthyList) return
unhealthyList.foreach { nodeMetric =>
@@ -192,33 +183,33 @@
}
/**
- * 当EM状态为StockAvailable时:
- * 1. 判断Metrics的更新时间,如果超过一定时间没修改状态,将Node修改为StockUnavailable状态
- *
- * @param stockAvailableList
- */
+ * 当EM状态为StockAvailable时:
+ * 1. 判断Metrics的更新时间,如果超过一定时间没修改状态,将Node修改为StockUnavailable状态
+ *
+ * @param stockAvailableList
+ */
private def dealStockAvailableList(stockAvailableList: util.List[NodeMetrics]): Unit = Utils.tryAndWarn {
if (null == stockAvailableList) return
stockAvailableList.foreach { nodeMetric =>
- updateMetricHealthy(nodeMetric, NodeHealthy.StockUnavailable, " manager think the EM is in StockUnAvailable state (Manager认为该EM已经处于StockUnAvailable 状态)")
+ updateMetricHealthy(nodeMetric, NodeHealthy.StockUnavailable, "Manager认为该EM已经处于StockUnAvailable 状态")
}
}
/**
- * 当EM为StockUnavailable状态时:
- * 1. 判断该EM下面是否还有engine,如果有engine需要manager主动要求所有的engine非强制退出
- * 2. 如果EM下的engine是UnHealthy状态,则将EM翻转为UnHealthy状态
- * 3. TODO 如果StockUnavailable状态如果超过n分钟,则发送IMS告警
- *
- * @param stockUnAvailableList
- */
+ * 当EM为StockUnavailable状态时:
+ * 1. 判断该EM下面是否还有engine,如果有engine需要manager主动要求所有的engine非强制退出
+ * 2. 如果EM下的engine是UnHealthy状态,则将EM翻转为UnHealthy状态
+ * 3. TODO 如果StockUnavailable状态如果超过n分钟,则发送IMS告警
+ *
+ * @param stockUnAvailableList
+ */
private def dealStockUnAvailableList(stockUnAvailableList: util.List[NodeMetrics]): Unit = Utils.tryAndWarn {
if (null == stockUnAvailableList) return
stockUnAvailableList.foreach { nodeMetric =>
if (managerLabelService.isEM(nodeMetric.getServiceInstance)) {
val nodes = nodeManagerPersistence.getEngineNodeByEM(nodeMetric.getServiceInstance)
if (null == nodes || nodes.isEmpty) {
- updateMetricHealthy(nodeMetric, NodeHealthy.UnHealthy, "manager think the EM is in UnHealthy state (Manager认为该EM已经处UnHealthy状态)")
+ updateMetricHealthy(nodeMetric, NodeHealthy.UnHealthy, "Manager认为该EM已经处UnHealthy状态")
} else {
fixedThreadPoll.submit {
new Runnable {
@@ -330,7 +321,7 @@
private def updateMetricHealthy(nodeMetrics: NodeMetrics, nodeHealthy: NodeHealthy, reason: String): Unit = {
warn(s"update instance ${nodeMetrics.getServiceInstance} from ${nodeMetrics.getHealthy} to ${nodeHealthy}")
val nodeHealthyInfo = new NodeHealthyInfo
- nodeHealthyInfo.setMsg(s"Manager-Monitor think the node is in UnHealthy state reason :$reason (Manager-Monitor 认为该节点为UnHealthy状态,原因:$reason)")
+ nodeHealthyInfo.setMsg(s"Manager-Monitor 认为该节点为UnHealthy状态,原因:$reason")
nodeHealthyInfo.setNodeHealthy(nodeHealthy)
nodeMetrics.setHealthy(metricsConverter.convertHealthyInfo(nodeHealthyInfo))
nodeMetricManagerPersistence.addOrupdateNodeMetrics(nodeMetrics)
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/pom.xml b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/pom.xml
index 9c4c0e9..ee8efa9 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/pom.xml
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<!--<relativePath>../pom.xml</relativePath>-->
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/dao/LabelManagerMapper.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/dao/LabelManagerMapper.java
index 9a0cfba..fc7fa54 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/dao/LabelManagerMapper.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/dao/LabelManagerMapper.java
@@ -10,7 +10,6 @@
import java.util.List;
import java.util.Map;
-
@Mapper
public interface LabelManagerMapper {
@@ -54,11 +53,11 @@
@Insert({
"<script>" +
- "insert into linkis_cg_manager_label_service_instance(label_id, service_instance, update_time,create_time) values " +
- "<foreach collection='labelIds' item='labelId' index='index' separator=','>" +
- "(#{labelId}, #{instance},now(),now())" +
- "</foreach>" +
- "</script>"
+ "insert into linkis_cg_manager_label_service_instance(label_id, service_instance, update_time,create_time) values " +
+ "<foreach collection='labelIds' item='labelId' index='index' separator=','>" +
+ "(#{labelId}, #{instance},now(),now())" +
+ "</foreach>" +
+ "</script>"
})
void addLabelServiceInstance(@Param("instance") String instance, @Param("labelIds") List<Integer> labelIds);
@@ -293,10 +292,21 @@
*/
List<Map<String, Object>> dimListNodeRelationsByKeyValueMap(@Param("keyValueMap") Map<String, Map<String, String>> labelKeyAndValuesMap, @Param("valueRelation") String name);
-
+ /**
+ * 通过instance信息,同时返回instance信息和label信息
+ *
+ * @param serviceInstances
+ * @return
+ */
List<Map<String, Object>> listLabelRelationByServiceInstance(@Param("nodes") List<ServiceInstance> serviceInstances);
-
+ /**
+ * 通过labelkey 和StringValue找到唯一的label
+ *
+ * @param labelKey
+ * @param stringValue
+ * @return
+ */
PersistenceLabel getLabelByKeyValue(@Param("labelKey") String labelKey, @Param("stringValue") String stringValue);
List<ServiceInstance> getNodeByLabelKeyValue(@Param("labelKey") String labelKey, @Param("stringValue") String stringValue);
@@ -309,7 +319,7 @@
/**
* 通过labelId获取到resource
- * get resource via labelId
+ *
* @param labelId
* @return
*/
@@ -317,7 +327,6 @@
/**
* 通过label的keyvalues再找到resource
- * find resource via keyvalues of label
* 和{@link LabelManagerMapper#dimListLabelByKeyValueMap(Map, String)} 区别只是多了关联和资源表,并且返回是resource
*
* @param keyValueMap
@@ -339,8 +348,9 @@
*
* @param singletonMap
*/
- void deleteResourceByLabelKeyValuesMaps(@Param("labelKeyValues") Map<String, Map<String, String>> singletonMap);
- void deleteResourceByLabelKeyValuesMapsInDirect(@Param("labelKeyValues") Map<String, Map<String, String>> singletonMap);
+ Integer selectLabelIdByLabelKeyValuesMaps(@Param("labelKeyValues") Map<String, Map<String, String>> singletonMap);
+ void deleteResourceByLabelKeyValuesMaps(Integer id);
+ void deleteResourceByLabelKeyValuesMapsInDirect(Integer id);
/**
* 批量删除 不删除label
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/dao/LockManagerMapper.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/dao/LockManagerMapper.java
index bdc3a63..a5fbda9 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/dao/LockManagerMapper.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/dao/LockManagerMapper.java
@@ -1,10 +1,9 @@
package com.webank.wedatasphere.linkis.manager.dao;
-import org.apache.ibatis.annotations.Delete;
-import org.apache.ibatis.annotations.Insert;
-import org.apache.ibatis.annotations.Mapper;
-import org.apache.ibatis.annotations.Param;
+import com.webank.wedatasphere.linkis.manager.common.entity.persistence.PersistenceLock;
+import org.apache.ibatis.annotations.*;
+import java.util.List;
@Mapper
public interface LockManagerMapper {
@@ -14,4 +13,12 @@
@Delete("delete from linkis_cg_manager_lock where lock_object = #{jsonObject}")
void unlock(String jsonObject);
+
+ @Select("select * from linkis_cg_manager_lock")
+ @Results({
+ @Result(property = "updateTime", column = "update_time"),
+ @Result(property = "createTime", column = "create_time")
+ })
+ List<PersistenceLock> getAll();
+
}
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/dao/NodeManagerMapper.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/dao/NodeManagerMapper.java
index 0945a1a..d28df54 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/dao/NodeManagerMapper.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/dao/NodeManagerMapper.java
@@ -6,7 +6,6 @@
import java.util.List;
-
@Mapper
public interface NodeManagerMapper {
@@ -71,7 +70,7 @@
})
PersistenceNode getNodeInstanceById(@Param("id") int id);
- @Select("select * from linkis_cg_manager_service_instance where instance in (select em_instance from linkis_cg_manager_engine_em where engine_instance in (select instance from linkis_cg_manager_service_instance where instance=#{instance}))")
+ @Select("select * from linkis_cg_manager_service_instance where instance in (select em_instance from linkis_cg_manager_engine_em where engine_instance=#{instance})")
@Results({
@Result(property = "updateTime", column = "update_time"),
@Result(property = "createTime", column = "create_time")
@@ -80,7 +79,7 @@
- @Select("select * from linkis_cg_manager_service_instance where instance in ( select engine_instance from linkis_cg_manager_engine_em where em_instance in (select instance from linkis_cg_manager_service_instance where instance=#{instance}))")
+ @Select("select * from linkis_cg_manager_service_instance where instance in ( select engine_instance from linkis_cg_manager_engine_em where em_instance=#{instance})")
@Results({
@Result(property = "updateTime", column = "update_time"),
@Result(property = "createTime", column = "create_time")
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/dao/impl/LabelManagerMapper.xml b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/dao/impl/LabelManagerMapper.xml
index 9025e26..80c2892 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/dao/impl/LabelManagerMapper.xml
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/dao/impl/LabelManagerMapper.xml
@@ -95,9 +95,10 @@
<select id="listResourceByLaBelId"
resultType="com.webank.wedatasphere.linkis.manager.common.entity.persistence.PersistenceResource">
- SELECT r.* FROM linkis_cg_manager_label_resource lr, linkis_cg_manager_linkis_resources r
+ SELECT r.* FROM linkis_cg_manager_label_resource lr, linkis_cg_manager_linkis_resources r, linkis_cg_manager_label l
WHERE lr.resource_id = r.id and
- lr.label_id = #{labelId}
+ lr.label_id = #{labelId} and
+ l.id = #{labelId}
</select>
<select id="dimListResourceBykeyValueMap"
@@ -126,42 +127,34 @@
</select>
<delete id="deleteResourceByLabelIdInDirect">
- delete from linkis_cg_manager_linkis_resources where id in (select resource_id from linkis_cg_manager_label_resource where label_id = #{label_id});
+ delete from linkis_cg_manager_linkis_resources where id in (select resource_id from linkis_cg_manager_label_resource where label_id = #{labelId});
</delete>
<delete id="deleteResourceByLabelId">
- delete from linkis_cg_manager_label_resource where label_id = #{label_id};
+ delete from linkis_cg_manager_label_resource where label_id = #{labelId};
</delete>
+ <select id="selectLabelIdByLabelKeyValuesMaps" resultType="java.lang.Integer">
+ select tmp.id from (
+ SELECT l.id,l.label_value_size
+ FROM linkis_cg_manager_label l ,linkis_cg_manager_label_resource lr ,linkis_cg_manager_label_value_relation lvr
+ WHERE l.id = lr.label_id AND l.id = lvr.label_id
+ AND (l.label_key,lvr.label_value_key,lvr.label_value_content) IN
+ <foreach collection="labelKeyValues" index="labelKey" item="labelValues" open="(" close=")" separator=",">
+ <foreach collection="labelValues" index="valueKey" item="valueContent" open="(" close=")" separator="),(">
+ #{labelKey},#{valueKey},#{valueContent}
+ </foreach>
+ </foreach>
+ GROUP BY l.id,lr.id HAVING COUNT(1) = l.label_value_size
+ ) as tmp limit 1
+ </select>
+
<delete id="deleteResourceByLabelKeyValuesMapsInDirect">
delete from linkis_cg_manager_linkis_resources where id in (select resource_id from linkis_cg_manager_label_resource
- where label_id = (select tmp.id from (
- SELECT l.id,l.label_value_size
- FROM linkis_cg_manager_label l ,linkis_cg_manager_label_resource lr ,linkis_cg_manager_label_value_relation lvr
- WHERE l.id = lr.label_id AND l.id = lvr.label_id
- AND (l.label_key,lvr.label_value_key,lvr.label_value_content) IN
- <foreach collection="labelKeyValues" index="labelKey" item="labelValues" open="(" close=")" separator=",">
- <foreach collection="labelValues" index="valueKey" item="valueContent" open="(" close=")" separator="),(">
- #{labelKey},#{valueKey},#{valueContent}
- </foreach>
- </foreach>
- GROUP BY l.id,lr.id HAVING COUNT(1) = l.label_value_size
- ) as tmp)
- );
+ where label_id = #{id} );
</delete>
<delete id="deleteResourceByLabelKeyValuesMaps">
- delete from linkis_cg_manager_label_resource where label_id = (select tmp.id from (
- SELECT l.id,l.label_value_size
- FROM linkis_cg_manager_label l ,linkis_cg_manager_label_resource lr ,linkis_cg_manager_label_value_relation lvr
- WHERE l.id = lr.label_id AND l.id = lvr.label_id
- AND (l.label_key,lvr.label_value_key,lvr.label_value_content) IN
- <foreach collection="labelKeyValues" index="labelKey" item="labelValues" open="(" close=")" separator=",">
- <foreach collection="labelValues" index="valueKey" item="valueContent" open="(" close=")" separator="),(">
- #{labelKey},#{valueKey},#{valueContent}
- </foreach>
- </foreach>
- GROUP BY l.id,lr.id HAVING COUNT(1) = l.label_value_size
- ) as tmp);
+ delete from linkis_cg_manager_label_resource where label_id = #{id};
</delete>
<delete id="batchDeleteResourceByLabelId">
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/LockManagerPersistence.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/LockManagerPersistence.java
index f7d23f7..8d0a638 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/LockManagerPersistence.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/LockManagerPersistence.java
@@ -1,9 +1,25 @@
package com.webank.wedatasphere.linkis.manager.persistence;
-
+/*
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
import com.webank.wedatasphere.linkis.manager.common.entity.persistence.PersistenceLock;
+import java.util.List;
public interface LockManagerPersistence {
Boolean lock(PersistenceLock persistenceLock,Long timeOut);
void unlock(PersistenceLock persistenceLock);
+ List<PersistenceLock> getAll();
}
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/ManagerPersistence.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/ManagerPersistence.java
index d815936..6fcf310 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/ManagerPersistence.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/ManagerPersistence.java
@@ -1,6 +1,20 @@
package com.webank.wedatasphere.linkis.manager.persistence;
-
+/*
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
public interface ManagerPersistence {
NodeManagerPersistence getNodeManagerPersistence();
LabelManagerPersistence getLabelManagerPersistence();
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/NodeManagerPersistence.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/NodeManagerPersistence.java
index 3c57779..a4c1891 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/NodeManagerPersistence.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/NodeManagerPersistence.java
@@ -1,5 +1,19 @@
package com.webank.wedatasphere.linkis.manager.persistence;
-
+/*
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
import com.webank.wedatasphere.linkis.common.ServiceInstance;
import com.webank.wedatasphere.linkis.manager.common.entity.node.EngineNode;
import com.webank.wedatasphere.linkis.manager.common.entity.node.Node;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/NodeMetricManagerPersistence.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/NodeMetricManagerPersistence.java
index 1c3387c..614c2ae 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/NodeMetricManagerPersistence.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/NodeMetricManagerPersistence.java
@@ -1,5 +1,19 @@
package com.webank.wedatasphere.linkis.manager.persistence;
-
+/*
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
import com.webank.wedatasphere.linkis.manager.common.entity.metrics.NodeMetrics;
import com.webank.wedatasphere.linkis.manager.common.entity.node.Node;
import com.webank.wedatasphere.linkis.manager.exception.PersistenceErrorException;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/ResourceLabelPersistence.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/ResourceLabelPersistence.java
index 4bda391..ced7198 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/ResourceLabelPersistence.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/ResourceLabelPersistence.java
@@ -1,5 +1,19 @@
package com.webank.wedatasphere.linkis.manager.persistence;
-
+/*
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
import com.webank.wedatasphere.linkis.manager.common.entity.label.LabelKeyValue;
import com.webank.wedatasphere.linkis.manager.common.entity.persistence.PersistenceLabel;
import com.webank.wedatasphere.linkis.manager.common.entity.persistence.PersistenceResource;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/ResourceManagerPersistence.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/ResourceManagerPersistence.java
index aa21dd6..fc64640 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/ResourceManagerPersistence.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/ResourceManagerPersistence.java
@@ -1,5 +1,19 @@
package com.webank.wedatasphere.linkis.manager.persistence;
-
+/*
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
import com.webank.wedatasphere.linkis.common.ServiceInstance;
import com.webank.wedatasphere.linkis.manager.common.entity.persistence.PersistenceLabel;
import com.webank.wedatasphere.linkis.manager.common.entity.persistence.PersistenceResource;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/impl/DefaultLabelManagerPersistence.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/impl/DefaultLabelManagerPersistence.java
index 7052edc..126bb45 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/impl/DefaultLabelManagerPersistence.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/impl/DefaultLabelManagerPersistence.java
@@ -1,7 +1,22 @@
package com.webank.wedatasphere.linkis.manager.persistence.impl;
-
+/*
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
import com.webank.wedatasphere.linkis.common.ServiceInstance;
import com.webank.wedatasphere.linkis.manager.common.entity.persistence.PersistenceLabel;
+import com.webank.wedatasphere.linkis.manager.common.entity.persistence.PersistenceLock;
import com.webank.wedatasphere.linkis.manager.common.entity.persistence.PersistenceNode;
import com.webank.wedatasphere.linkis.manager.common.entity.persistence.PersistenceResource;
import com.webank.wedatasphere.linkis.manager.dao.LabelManagerMapper;
@@ -21,7 +36,6 @@
import java.util.*;
import java.util.stream.Collectors;
-
public class DefaultLabelManagerPersistence implements LabelManagerPersistence {
private final Logger logger = LoggerFactory.getLogger(this.getClass());
@@ -68,7 +82,20 @@
public void removeLabel(PersistenceLabel persistenceLabel) {
String labelKey = persistenceLabel.getLabelKey();
String labelStringValue = persistenceLabel.getStringValue();
- labelManagerMapper.deleteByLabel(labelKey, labelStringValue);
+ int labelId = persistenceLabel.getId() ;
+ if (labelId <= 0 ) {
+ PersistenceLabel labelByKeyValue = labelManagerMapper.getLabelByKeyValue(labelKey, labelStringValue);
+ if (null == labelByKeyValue) {
+ logger.warn("Can not find label labelKey {}, label value {}", labelKey, labelStringValue);
+ return;
+ }
+ labelId = labelByKeyValue.getId();
+ }
+ if (labelId > 0) {
+ labelManagerMapper.deleteLabel(labelId);
+ labelManagerMapper.deleteLabelKeyVaules(labelId);
+ }
+
}
@Override
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/impl/DefaultLockManagerPersistence.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/impl/DefaultLockManagerPersistence.java
index cf6c2ed..2c18a51 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/impl/DefaultLockManagerPersistence.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/impl/DefaultLockManagerPersistence.java
@@ -1,5 +1,19 @@
package com.webank.wedatasphere.linkis.manager.persistence.impl;
-
+/*
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
import com.webank.wedatasphere.linkis.manager.common.entity.persistence.PersistenceLock;
import com.webank.wedatasphere.linkis.manager.dao.LockManagerMapper;
import com.webank.wedatasphere.linkis.manager.persistence.LockManagerPersistence;
@@ -7,6 +21,7 @@
import org.slf4j.LoggerFactory;
import org.springframework.dao.DataAccessException;
+import java.util.List;
public class DefaultLockManagerPersistence implements LockManagerPersistence {
@@ -42,8 +57,8 @@
lockManagerMapper.lock(persistenceLock.getLockObject(),timeOut);
return true;
} catch (DataAccessException e){
- logger.warn("Failed to obtain lock:" + persistenceLock.getLockObject());
- return false;
+ logger.warn("Failed to obtain lock:" + persistenceLock.getLockObject());
+ return false;
}
}
@@ -52,5 +67,10 @@
lockManagerMapper.unlock(persistenceLock.getLockObject());
}
+ @Override
+ public List<PersistenceLock> getAll() {
+ return lockManagerMapper.getAll();
+ }
+
}
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/impl/DefaultManagerPersistence.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/impl/DefaultManagerPersistence.java
index 93dca7b..eee1b32 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/impl/DefaultManagerPersistence.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/impl/DefaultManagerPersistence.java
@@ -1,5 +1,19 @@
package com.webank.wedatasphere.linkis.manager.persistence.impl;
-
+/*
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
import com.webank.wedatasphere.linkis.manager.persistence.*;
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/impl/DefaultResourceLabelPersistence.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/impl/DefaultResourceLabelPersistence.java
index e4a7962..95dca94 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/impl/DefaultResourceLabelPersistence.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/main/java/com/webank/wedatasphere/linkis/manager/persistence/impl/DefaultResourceLabelPersistence.java
@@ -1,5 +1,19 @@
package com.webank.wedatasphere.linkis.manager.persistence.impl;
-
+/*
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
import com.google.common.collect.Iterables;
import com.webank.wedatasphere.linkis.manager.common.entity.label.LabelKeyValue;
import com.webank.wedatasphere.linkis.manager.common.entity.persistence.PersistenceLabel;
@@ -12,6 +26,7 @@
import com.webank.wedatasphere.linkis.manager.util.PersistenceUtils;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
+import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.transaction.annotation.Transactional;
@@ -22,7 +37,6 @@
import java.util.function.Supplier;
import java.util.stream.Collectors;
-
public class DefaultResourceLabelPersistence implements ResourceLabelPersistence {
private static final Logger logger = LoggerFactory.getLogger(DefaultResourceLabelPersistence.class);
@@ -66,23 +80,19 @@
}
@Override
- // @Transactional(rollbackFor = Throwable.class)
+ // @Transactional(rollbackFor = Throwable.class)
public void setResourceToLabel(PersistenceLabel label, PersistenceResource persistenceResource) {
if (label == null || persistenceResource == null) return;
- //label id 不为空,则直接通过label_id 查询,否则通过 value_key and value_content 查询
- Integer labelId = label.getId();
+
// TODO: 2020/8/31 id 的包装类
- if (labelId <= 0) {
- if (MapUtils.isEmpty(label.getValue())) return;
- List<PersistenceLabel> resourceLabels = labelManagerMapper.listLabelByKeyValueMap(Collections.singletonMap(label.getLabelKey(), label.getValue()));
- labelId = resourceLabels.stream().findFirst().orElseGet(new Supplier<PersistenceLabel>() {
- @Override
- public PersistenceLabel get() {
- labelManagerMapper.registerLabel(label);
- return label;
- }
- }).getId();
- label.setId(labelId);
+ if (label.getId() <= 0) {
+ if (StringUtils.isEmpty(label.getStringValue())) return;
+ PersistenceLabel resourceLabel = labelManagerMapper.getLabelByKeyValue(label.getLabelKey(), label.getStringValue());
+ if (null == resourceLabel) {
+ labelManagerMapper.registerLabel(label);
+ } else {
+ label.setId(resourceLabel.getId());
+ }
}
//插入resource和relation记录
@@ -91,7 +101,7 @@
//找到label对应的persistenceResourceId,不存在就插入,存在就更新
// TODO: 2020/9/8 多resource的判断,persistenceResource 有id的话,直接update这个
- List <PersistenceResource> resourceByLabels = labelManagerMapper.listResourceByLaBelId(labelId);
+ List <PersistenceResource> resourceByLabels = labelManagerMapper.listResourceByLaBelId(label.getId());
if (CollectionUtils.isNotEmpty(resourceByLabels)) {
if(resourceByLabels.size() > 1){
logger.warn("Label[" + label + "]has resource size > 1");
@@ -100,7 +110,7 @@
resourceManagerMapper.nodeResourceUpdateByResourceId(resourceToUpdate.getId(), persistenceResource);
} else {
resourceManagerMapper.registerResource(persistenceResource);
- labelManagerMapper.addLabelsAndResource(persistenceResource.getId(), Collections.singletonList(labelId));
+ labelManagerMapper.addLabelsAndResource(persistenceResource.getId(), Collections.singletonList(label.getId()));
}
}
@@ -111,24 +121,33 @@
if (label.getId() != null && label.getId() > 0) {
return labelManagerMapper.listResourceByLaBelId(label.getId());
} else {
+ PersistenceLabel dbLabel = labelManagerMapper.getLabelByKeyValue(label.getLabelKey(), label.getStringValue());
+ if (null == dbLabel) {
+ return Collections.emptyList();
+ } else {
+ return labelManagerMapper.listResourceByLaBelId(dbLabel.getId());
+ }
+ }
+ /*else {
Map<String, String> value = label.getValue();
if (MapUtils.isEmpty(value)) return Collections.emptyList();
String dimType = Label.ValueRelation.ALL.name();
return labelManagerMapper.dimListResourceBykeyValueMap(Collections.singletonMap(label.getLabelKey(), value), dimType);
- }
+ }*/
}
@Override
- // @Transactional(rollbackFor = Throwable.class)
+ // @Transactional(rollbackFor = Throwable.class)
public void removeResourceByLabel(PersistenceLabel label) {
//label id 不为空,则直接通过label_id 查询,否则通过 value_key and value_content 查询
- if (label.getId() != null) {
- labelManagerMapper.deleteResourceByLabelId(label.getId());
- labelManagerMapper.deleteResourceByLabelIdInDirect(label.getId());
- } else {
- Map<String, String> value = label.getValue();
- labelManagerMapper.deleteResourceByLabelKeyValuesMaps(Collections.singletonMap(label.getLabelKey(), value));
- labelManagerMapper.deleteResourceByLabelKeyValuesMapsInDirect(Collections.singletonMap(label.getLabelKey(), value));
+ int labelId = label.getId() ;
+ if (labelId <= 0 ) {
+ PersistenceLabel labelByKeyValue = labelManagerMapper.getLabelByKeyValue(label.getLabelKey(), label.getStringValue());
+ labelId = labelByKeyValue.getId();
+ }
+ if (labelId > 0) {
+ labelManagerMapper.deleteResourceByLabelIdInDirect(labelId);
+ labelManagerMapper.deleteResourceByLabelId(labelId);
}
}
diff --git a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/PersistenceTest.java b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/PersistenceTest.java
index 1f00439..46fe60b 100644
--- a/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/PersistenceTest.java
+++ b/linkis-computation-governance/linkis-manager/linkis-manager-persistence/src/test/java/PersistenceTest.java
@@ -149,8 +149,6 @@
key1.put("alias", "em");
key1.put("key2", "value2");
labelKeyValues.put("serverAlias", key1);
- labelManagerMapper.deleteResourceByLabelKeyValuesMaps(labelKeyValues);
- labelManagerMapper.deleteResourceByLabelKeyValuesMapsInDirect(labelKeyValues);
}
@Test
diff --git a/linkis-computation-governance/linkis-manager/linkis-resource-manager/pom.xml b/linkis-computation-governance/linkis-manager/linkis-resource-manager/pom.xml
index 7f79bcb..caa78ba 100644
--- a/linkis-computation-governance/linkis-manager/linkis-resource-manager/pom.xml
+++ b/linkis-computation-governance/linkis-manager/linkis-resource-manager/pom.xml
@@ -18,7 +18,7 @@
<modelVersion>4.0.0</modelVersion>
<parent>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<groupId>com.webank.wedatasphere.linkis</groupId>
<artifactId>linkis</artifactId>
</parent>
@@ -260,6 +260,7 @@
<scope>provided</scope>
</dependency>
+
</dependencies>
<build>
<plugins>
diff --git a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/com/webank/wedatasphere/linkis/resourcemanager/domain/RMLabelContainer.java b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/com/webank/wedatasphere/linkis/resourcemanager/domain/RMLabelContainer.java
index 518fe22..a55e17c 100644
--- a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/com/webank/wedatasphere/linkis/resourcemanager/domain/RMLabelContainer.java
+++ b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/com/webank/wedatasphere/linkis/resourcemanager/domain/RMLabelContainer.java
@@ -21,6 +21,7 @@
import com.webank.wedatasphere.linkis.manager.label.builder.CombinedLabelBuilder;
import com.webank.wedatasphere.linkis.manager.label.entity.CombinedLabel;
import com.webank.wedatasphere.linkis.manager.label.entity.Label;
+import com.webank.wedatasphere.linkis.manager.label.entity.ResourceLabel;
import com.webank.wedatasphere.linkis.manager.label.entity.em.EMInstanceLabel;
import com.webank.wedatasphere.linkis.manager.label.entity.engine.EngineInstanceLabel;
import com.webank.wedatasphere.linkis.manager.label.entity.engine.EngineTypeLabel;
@@ -31,7 +32,9 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.util.ArrayList;
import java.util.List;
+import java.util.stream.Collectors;
public class RMLabelContainer {
@@ -65,6 +68,13 @@
return labels;
}
+ public List<Label<?>> getResourceLabels() {
+ if (null != labels) {
+ return labels.stream().filter(label -> label instanceof ResourceLabel).collect(Collectors.toList());
+ }
+ return new ArrayList<>();
+ }
+
public Label find(Class labelClass) {
for (Label label : labels) {
if (labelClass.isInstance(label)) {
@@ -91,9 +101,9 @@
public EngineTypeLabel getEngineTypeLabel() throws RMErrorException {
if(engineTypeLabel == null){
for (Label label : labels) {
- if(label instanceof EngineTypeLabel){
- return (EngineTypeLabel) label;
- }
+ if(label instanceof EngineTypeLabel){
+ return (EngineTypeLabel) label;
+ }
}
} else {
return engineTypeLabel;
diff --git a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/com/webank/wedatasphere/linkis/resourcemanager/external/service/impl/ExternalResourceServiceImpl.java b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/com/webank/wedatasphere/linkis/resourcemanager/external/service/impl/ExternalResourceServiceImpl.java
index 7927b52..ec5b2ba 100644
--- a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/com/webank/wedatasphere/linkis/resourcemanager/external/service/impl/ExternalResourceServiceImpl.java
+++ b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/java/com/webank/wedatasphere/linkis/resourcemanager/external/service/impl/ExternalResourceServiceImpl.java
@@ -34,6 +34,7 @@
import com.webank.wedatasphere.linkis.resourcemanager.external.yarn.YarnResourceRequester;
import com.webank.wedatasphere.linkis.resourcemanager.utils.RMConfiguration;
import com.webank.wedatasphere.linkis.resourcemanager.utils.RMUtils;
+import org.apache.commons.lang.exception.ExceptionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
@@ -97,15 +98,17 @@
private Object retry(int retryNum, Function function) throws RMErrorException {
int times = 0;
+ String errorMsg = "Failed to request external resource";
while(times < retryNum){
try{
return function.apply(null);
} catch (Exception e){
+ errorMsg = "Failed to request external resource" + ExceptionUtils.getRootCauseMessage(e);
logger.warn("failed to request external resource provider", e);
times ++;
}
}
- throw new RMErrorException(11006, "failed to request external resource provider");
+ throw new RMErrorException(11006, errorMsg);
}
private ExternalResourceProvider chooseProvider(ResourceType resourceType, RMLabelContainer labelContainer) throws RMErrorException {
diff --git a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/RMReceiver.scala b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/RMReceiver.scala
index 8ae31c6..14593af 100644
--- a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/RMReceiver.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/RMReceiver.scala
@@ -26,7 +26,6 @@
import scala.concurrent.duration.Duration
-@Component
class RMReceiver extends Receiver with Logging {
@Autowired
diff --git a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/external/yarn/YarnResourceRequester.scala b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/external/yarn/YarnResourceRequester.scala
index 80f2f5e..147747d 100644
--- a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/external/yarn/YarnResourceRequester.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/external/yarn/YarnResourceRequester.scala
@@ -50,11 +50,6 @@
info(s"rmWebAddress: $rmWebAddress")
val queueName = identifier.asInstanceOf[YarnResourceIdentifier].getQueueName
this.provider = provider
- def getHadoopVersion() = if(provider.getConfigMap.get("hadoopVersion") != null) provider.getConfigMap.get("hadoopVersion").asInstanceOf[String] else {
- val resourceManagerVersion = getResponseByUrl("info", rmWebAddress) \ "clusterInfo" \ "resourceManagerVersion"
- info(s"Hadoop version is $resourceManagerVersion")
- resourceManagerVersion.values.asInstanceOf[String]
- }
def getYarnResource(jValue: Option[JValue]) = jValue.map(r => new YarnResource((r \ "memory").asInstanceOf[JInt].values.toLong * 1024l * 1024l, (r \ "vCores").asInstanceOf[JInt].values.toInt, 0, queueName))
diff --git a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/message/RMMessageService.scala b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/message/RMMessageService.scala
index 28181e2..917b41a 100644
--- a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/message/RMMessageService.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/message/RMMessageService.scala
@@ -56,12 +56,24 @@
@Receiver
def dealWithNodeHeartbeatMsg(nodeHeartbeatMsg: NodeHeartbeatMsg, smc: ServiceMethodContext): Unit = {
- info(s"Start to deal with nodeHeartbeatMsg resource info $nodeHeartbeatMsg")
- val labels = nodeLabelService.getNodeLabels(nodeHeartbeatMsg.getServiceInstance)
+ debug(s"Start to deal with nodeHeartbeatMsg resource info $nodeHeartbeatMsg")
+ /*val labels = nodeLabelService.getNodeLabels(nodeHeartbeatMsg.getServiceInstance)
if (managerLabelService.isEngine(labels) && !nodeHeartbeatMsg.getStatus.equals(NodeStatus.ShuttingDown)) {
resourceManager.resourceReport(labels, nodeHeartbeatMsg.getNodeResource)
- }
- info(s"Finished to deal with nodeHeartbeatMsg resource info $nodeHeartbeatMsg")
+ }*/
+ debug(s"Finished to deal with nodeHeartbeatMsg resource info $nodeHeartbeatMsg")
+ /* info(s"Start to deal with resourceUsedProtocol $nodeHeartbeatMsg")
+ val labels = nodeLabelService.getNodeLabels(nodeHeartbeatMsg.getServiceInstance)
+ resourceManager.resourceUsed(labels, nodeHeartbeatMsg.getNodeResource)
+ if(managerLabelService.isEM(nodeHeartbeatMsg.getServiceInstance)){
+ //resourceManager.register(nodeHeartbeatMsg.getServiceInstance, nodeHeartbeatMsg.getNodeResource)
+ } else if(managerLabelService.isEngine(nodeHeartbeatMsg.getServiceInstance)){
+ info(s"Start to deal with resourceUsedProtocol $nodeHeartbeatMsg")
+
+
+ } else {
+ info(s"${nodeHeartbeatMsg.getServiceInstance} is neither EM nor Engine")
+ }*/
}
@@ -77,7 +89,7 @@
resourceManager.register(registerEMRequest.getServiceInstance, registerEMRequest.getNodeResource)
}
- @Receiver
+
def dealWithStopEMRequest(stopEMRequest: StopEMRequest, smc: ServiceMethodContext): Unit = {
resourceManager.unregister(stopEMRequest.getEm)
}
diff --git a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/restful/RMMonitorRest.scala b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/restful/RMMonitorRest.scala
index 20f0740..2897785 100644
--- a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/restful/RMMonitorRest.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/restful/RMMonitorRest.scala
@@ -237,19 +237,25 @@
appendMessageData(message, "engines", engines)
}
+ /**
+ * 仅用于向下兼容老接口
+ * @param request
+ * @param param
+ * @return
+ */
@POST
@Path("enginekill")
def killEngine(@Context request: HttpServletRequest, param: util.ArrayList[util.Map[String, AnyRef]]): Response = {
val userName = SecurityFilter.getLoginUsername(request)
for (engineParam <- param) {
- val moduleName = engineParam.get("engineType").asInstanceOf[String]
+ val moduleName = engineParam.get("applicationName").asInstanceOf[String]
val engineInstance = engineParam.get("engineInstance").asInstanceOf[String]
val stopEngineRequest = new EngineStopRequest(ServiceInstance(moduleName, engineInstance), userName)
val job = messagePublisher.publish(stopEngineRequest)
Utils.tryAndWarn(job.get(RMUtils.MANAGER_KILL_ENGINE_EAIT.getValue.toLong, TimeUnit.MILLISECONDS))
info(s"Finished to kill engine ")
}
- Message.ok("success to submit the request of kill engine (成功提交kill引擎请求)。")
+ Message.ok("成功提交kill引擎请求。")
}
@@ -275,7 +281,7 @@
usedCPUPercentage = usedResource.queueCores.asInstanceOf[Double] / maxResource.queueCores.asInstanceOf[Double]
queueInfo.put("usedPercentage", Map("memory" -> usedMemoryPercentage, "cores" -> usedCPUPercentage))
appendMessageData(message, "queueInfo", queueInfo)
- case _ => Message.error("failed to get queue resource (获取队列资源失败)")
+ case _ => Message.error("获取队列资源失败")
}
val userResourceRecords = new ArrayBuffer[mutable.HashMap[String, Any]]()
diff --git a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/service/RequestResourceService.scala b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/service/RequestResourceService.scala
index 7ab52c2..f3fafae 100644
--- a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/service/RequestResourceService.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/service/RequestResourceService.scala
@@ -22,32 +22,38 @@
import com.webank.wedatasphere.linkis.resourcemanager.domain.RMLabelContainer
import com.webank.wedatasphere.linkis.resourcemanager.exception.RMWarnException
import com.webank.wedatasphere.linkis.resourcemanager.utils.RMUtils.aggregateResource
-import com.webank.wedatasphere.linkis.resourcemanager.utils.{AlertUtils, RMConfiguration, UserConfiguration}
+import com.webank.wedatasphere.linkis.resourcemanager.utils.{AlertUtils, RMConfiguration, RMUtils, UserConfiguration}
abstract class RequestResourceService(labelResourceService: LabelResourceService) extends Logging{
val resourceType: ResourceType = ResourceType.Default
+ val enableRequest = RMUtils.RM_REQUEST_ENABLE.getValue
+
def canRequest(labelContainer: RMLabelContainer, resource: NodeResource): Boolean = {
var labelResource = labelResourceService.getLabelResource(labelContainer.getCurrentLabel)
// for configuration resource
- if(labelContainer.getCurrentLabel.equals(labelContainer.getCombinedUserCreatorEngineTypeLabel)){
+ if(labelContainer.getCombinedUserCreatorEngineTypeLabel.equals(labelContainer.getCurrentLabel)){
if(labelResource == null) {
labelResource = new CommonNodeResource
labelResource.setResourceType(resource.getResourceType)
labelResource.setUsedResource(Resource.initResource(resource.getResourceType))
labelResource.setLockedResource(Resource.initResource(resource.getResourceType))
+ info(s"ResourceInit:${labelContainer.getCurrentLabel.getStringValue}")
}
val configuredResource = UserConfiguration.getUserConfiguredResource(resource.getResourceType, labelContainer.getUserCreatorLabel, labelContainer.getEngineTypeLabel)
- info(s"Get configured resource ${configuredResource} for [${labelContainer.getUserCreatorLabel}] and [${labelContainer.getEngineTypeLabel}]")
+ debug(s"Get configured resource ${configuredResource} for [${labelContainer.getUserCreatorLabel}] and [${labelContainer.getEngineTypeLabel}]")
labelResource.setMaxResource(configuredResource)
labelResource.setMinResource(Resource.initResource(labelResource.getResourceType))
labelResource.setLeftResource(labelResource.getMaxResource - labelResource.getUsedResource - labelResource.getLockedResource)
+ labelResourceService.setLabelResource(labelContainer.getCurrentLabel, labelResource)
+ info(s"${labelContainer.getCurrentLabel} to request [${resource.getMinResource}] \t labelResource: Max: ${labelResource.getMaxResource} \t " +
+ s"use: ${labelResource.getUsedResource} \t locked: ${labelResource.getLockedResource}")
}
- info(s"Label [${labelContainer.getCurrentLabel}] has resource + [${labelResource }]")
+ debug(s"Label [${labelContainer.getCurrentLabel}] has resource + [${labelResource }]")
if(labelResource != null){
- val labelAvailableResource = labelResource.getLeftResource - labelResource.getMinResource
- if(labelAvailableResource < resource.getMinResource){
+ val labelAvailableResource = labelResource.getLeftResource
+ if(labelAvailableResource < resource.getMinResource && enableRequest){
info(s"Failed check: ${labelContainer.getUserCreatorLabel.getUser} want to use label [${labelContainer.getCurrentLabel}] resource[${resource.getMinResource}] > label available resource[${labelAvailableResource}]")
// TODO sendAlert(moduleInstance, user, creator, requestResource, moduleAvailableResource.resource, moduleLeftResource)
val notEnoughMessage = generateNotEnoughMessage(aggregateResource(labelResource.getUsedResource, labelResource.getLockedResource), labelAvailableResource)
@@ -81,35 +87,35 @@
def generateNotEnoughMessage(requestResource: Resource, availableResource: Resource) : (Int, String) = {
requestResource match {
case m: MemoryResource =>
- (11011, s"The remote server is out of memory resources(远程服务器内存资源不足。)")
+ (11011, s"Drive memory resources are insufficient, to reduce the drive memory(内存资源不足,建议调小驱动内存)")
case c: CPUResource =>
- (11012, s"Insufficient CPU resources on remote server (远程服务器CPU资源不足。)")
+ (11012, s"CPU resources are insufficient, to reduce the number of driver cores(CPU资源不足,建议调小驱动核数)")
case i: InstanceResource =>
- (11013, s"The remote server is out of resources (远程服务器资源不足。)")
+ (11013, s"Insufficient number of instances, idle engines can be killed(实例数不足,可以kill空闲的引擎)")
case l: LoadResource =>
val loadAvailable = availableResource.asInstanceOf[LoadResource]
if(l.cores > loadAvailable.cores){
- (11012, s"Insufficient CPU resources on remote server (远程服务器CPU资源不足。)")
+ (11012, s"CPU resources are insufficient, to reduce the number of driver cores(CPU资源不足,建议调小驱动核数)")
} else {
- (11011, s"The remote server is out of memory resources(远程服务器内存资源不足。)")
+ (11011, s"Drive memory resources are insufficient, to reduce the drive memory(内存资源不足,建议调小驱动内存)")
}
case li: LoadInstanceResource =>
val loadInstanceAvailable = availableResource.asInstanceOf[LoadInstanceResource]
if(li.cores > loadInstanceAvailable.cores){
- (11012, s"Insufficient CPU resources on remote server (远程服务器CPU资源不足。)")
+ (11012, s"CPU resources are insufficient, to reduce the number of driver cores(CPU资源不足,建议调小驱动核数)")
} else if (li.memory > loadInstanceAvailable.memory) {
- (11011, s"The remote server is out of memory resources(远程服务器内存资源不足。)")
+ (11011, s"Drive memory resources are insufficient, to reduce the drive memory(内存资源不足,建议调小驱动内存)")
} else {
- (11013, s"The remote server is out of resources (远程服务器资源不足。)")
+ (11013, s"Insufficient number of instances, idle engines can be killed(实例数不足,可以kill空闲的引擎)")
}
case yarn: YarnResource =>
val yarnAvailable = availableResource.asInstanceOf[YarnResource]
if(yarn.queueCores > yarnAvailable.queueCores){
- (11014, s"The queue CPU resources are insufficient, it is recommended to reduce the number of executors(队列CPU资源不足,建议调小执行器个数。)")
+ (11014, s"Queue CPU resources are insufficient, reduce the number of executors.(队列CPU资源不足,建议调小执行器个数)")
} else if (yarn.queueMemory > yarnAvailable.queueMemory){
- (11015, s"Insufficient queue memory resources, it is recommended to reduce the actuator memory (队列内存资源不足,建议调小执行器内存。)")
+ (11015, s"Insufficient queue memory resources, reduce the executor memory")
} else {
- (11016, s"Number of queue instances exceeds limit (队列实例数超过限制。)")
+ (11016, s"Insufficient number of queue instances, idle engines can be killed(队列实例数不足,可以kill空闲的引擎)")
}
case dy: DriverAndYarnResource =>
val dyAvailable = availableResource.asInstanceOf[DriverAndYarnResource]
@@ -117,10 +123,10 @@
dy.loadInstanceResource.cores > dyAvailable.loadInstanceResource.cores ||
dy.loadInstanceResource.instances > dyAvailable.loadInstanceResource.instances){
val detail = generateNotEnoughMessage(dy.loadInstanceResource, dyAvailable.loadInstanceResource)
- (detail._1, s"Request a server resource, ${detail._2} 请求服务器资源时,${detail._2}")
+ (detail._1, s"When requesting server resources,${detail._2}")
} else {
val detail = generateNotEnoughMessage(dy.yarnResource, dyAvailable.yarnResource)
- (detail._1, s"Request a server resource, ${detail._2} 请求队列资源时,${detail._2}")
+ (detail._1, s"When requesting server resources,${detail._2}")
}
case s: SpecialResource => throw new RMWarnException(11003,"not supported resource type " + s.getClass)
case r: Resource => throw new RMWarnException(11003,"not supported resource type " + r.getClass)
diff --git a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/service/ResourceLockService.scala b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/service/ResourceLockService.scala
index beddc7a..d897690 100644
--- a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/service/ResourceLockService.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/service/ResourceLockService.scala
@@ -20,13 +20,15 @@
import com.webank.wedatasphere.linkis.common.utils.Logging
import com.webank.wedatasphere.linkis.manager.common.entity.persistence.PersistenceLock
-import com.webank.wedatasphere.linkis.manager.label.entity.EngineNodeLabel
+import com.webank.wedatasphere.linkis.manager.label.entity.{EngineNodeLabel, ResourceLabel}
import com.webank.wedatasphere.linkis.manager.persistence.LockManagerPersistence
import com.webank.wedatasphere.linkis.resourcemanager.domain.RMLabelContainer
import org.apache.commons.lang.StringUtils
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Component
+import scala.collection.JavaConversions._
+
@Component
class ResourceLockService extends Logging {
@@ -38,7 +40,7 @@
def tryLock(labelContainer: RMLabelContainer, timeout: Long): Boolean = {
if(StringUtils.isBlank(labelContainer.getCurrentLabel.getStringValue)
- || !labelContainer.getCurrentLabel.isInstanceOf[EngineNodeLabel]
+ || !labelContainer.getCurrentLabel.isInstanceOf[ResourceLabel]
|| labelContainer.getLockedLabels.contains(labelContainer.getCurrentLabel)){
return true
}
@@ -83,4 +85,14 @@
}
}
+ def clearTimeoutLock(timeout: Long) = {
+ val currentTime = System.currentTimeMillis
+ lockManagerPersistence.getAll.foreach{ lock =>
+ if(currentTime - lock.getCreateTime.getTime > timeout){
+ lockManagerPersistence.unlock(lock)
+ warn("timeout force unlock " + lock.getLockObject)
+ }
+ }
+ }
+
}
diff --git a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/service/impl/DefaultResourceManager.scala b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/service/impl/DefaultResourceManager.scala
index 2785a05..285b188 100644
--- a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/service/impl/DefaultResourceManager.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/service/impl/DefaultResourceManager.scala
@@ -28,14 +28,13 @@
import com.webank.wedatasphere.linkis.manager.common.entity.resource.{CommonNodeResource, NodeResource, Resource, ResourceType}
import com.webank.wedatasphere.linkis.manager.common.utils.ResourceUtils
import com.webank.wedatasphere.linkis.manager.label.builder.factory.LabelBuilderFactoryContext
-import com.webank.wedatasphere.linkis.manager.label.entity.Label
+import com.webank.wedatasphere.linkis.manager.label.entity.{Label, ResourceLabel}
import com.webank.wedatasphere.linkis.manager.label.entity.em.EMInstanceLabel
import com.webank.wedatasphere.linkis.manager.label.entity.engine.EngineInstanceLabel
import com.webank.wedatasphere.linkis.manager.label.service.NodeLabelService
import com.webank.wedatasphere.linkis.manager.persistence.{NodeManagerPersistence, ResourceManagerPersistence}
import com.webank.wedatasphere.linkis.resourcemanager._
import com.webank.wedatasphere.linkis.resourcemanager.domain.RMLabelContainer
-import com.webank.wedatasphere.linkis.resourcemanager.errorcode.RMErrorConstants
import com.webank.wedatasphere.linkis.resourcemanager.exception.{RMErrorException, RMWarnException}
import com.webank.wedatasphere.linkis.resourcemanager.external.service.ExternalResourceService
import com.webank.wedatasphere.linkis.resourcemanager.protocol.{TimeoutEMEngineRequest, TimeoutEMEngineResponse}
@@ -66,6 +65,9 @@
@Autowired
var externalResourceService: ExternalResourceService = _
+ @Autowired
+ var resourceLogService: ResourceLogService = _
+
/*@Autowired
var nodeHeartbeatMonitor: NodeHeartbeatMonitor = _*/
@@ -79,6 +81,15 @@
new DefaultReqResourceService(labelResourceService),
new DriverAndYarnReqResourceService(labelResourceService, externalResourceService)
)
+ // submit force release timeout lock job
+ Utils.defaultScheduler.scheduleAtFixedRate(new Runnable {
+ override def run(): Unit = {
+ info("Start force release timeout locks")
+ resourceLockService.clearTimeoutLock(RMConfiguration.LOCK_RELEASE_TIMEOUT.getValue.toLong)
+ }
+ }, RMConfiguration.LOCK_RELEASE_CHECK_INTERVAL.getValue.toLong, RMConfiguration.LOCK_RELEASE_CHECK_INTERVAL.getValue.toLong, TimeUnit.MILLISECONDS)
+ // submit NodeHeartbeatMonitor job
+ //Utils.defaultScheduler.scheduleAtFixedRate(nodeHeartbeatMonitor, RMConfiguration.NODE_HEARTBEAT_INTERVAL.getValue.toLong, RMConfiguration.NODE_HEARTBEAT_INTERVAL.getValue.toLong, TimeUnit.MILLISECONDS)
}
/**
@@ -96,7 +107,7 @@
Utils.tryFinally {
// lock labels
- labelContainer.getLabels.toArray.foreach {
+ labelContainer.getResourceLabels.foreach {
case label: Label[_] =>
labelContainer.setCurrentLabel(label.asInstanceOf[Label[_]])
resourceLockService.tryLock(labelContainer)
@@ -110,7 +121,15 @@
}
}
// TODO get ID Label set label resource
- labelResourceService.setLabelResource(eMInstanceLabel, resource)
+ Utils.tryCatch{
+ labelResourceService.setLabelResource(eMInstanceLabel, resource)
+ resourceLogService.success(ChangeType.ECM_INIT, null, eMInstanceLabel)
+ }{
+ case exception: Exception => {
+ resourceLogService.failed(ChangeType.ECM_INIT, null, eMInstanceLabel, exception)
+ }
+ case _ =>
+ }
} {
//5.Release lock(释放锁)
resourceLockService.unLock(labelContainer)
@@ -130,26 +149,37 @@
val labelContainer = new RMLabelContainer(Lists.newArrayList(eMInstanceLabel))
Utils.tryFinally {
- // lock labels
- labelContainer.getLabels.toArray.foreach {
- case label: Label[_] =>
- labelContainer.setCurrentLabel(label.asInstanceOf[Label[_]])
- resourceLockService.tryLock(labelContainer)
+ // No need to lock for ECM unregistration
+// labelContainer.getLabels.toArray.foreach {
+// case label: Label[_] =>
+// labelContainer.setCurrentLabel(label.asInstanceOf[Label[_]])
+// resourceLockService.tryLock(labelContainer)
+// case _ =>
+// }
+
+ // clear EM related engine resource records
+ Utils.tryCatch{
+ nodeManagerPersistence.getEngineNodeByEM(serviceInstance).foreach{ node =>
+ val engineInstanceLabel = LabelBuilderFactoryContext.getLabelBuilderFactory.createLabel(classOf[EngineInstanceLabel])
+ engineInstanceLabel.setInstance(node.getServiceInstance.getInstance)
+ engineInstanceLabel.setServiceName(node.getServiceInstance.getApplicationName)
+ labelResourceService.removeResourceByLabel(engineInstanceLabel)
+ }
+ labelResourceService.removeResourceByLabel(eMInstanceLabel)
+ labelContainer.setCurrentLabel(eMInstanceLabel)
+ resourceLockService.unLock(labelContainer)
+ resourceLogService.success(ChangeType.ECM_CLEAR, null, eMInstanceLabel)
+ }{
+ case exception: Exception => {
+ resourceLogService.failed(ChangeType.ECM_CLEAR, null, eMInstanceLabel, exception)
+ }
case _ =>
}
- // clear EM related engine resource records
- nodeManagerPersistence.getEngineNodeByEM(serviceInstance).foreach{ node =>
- val engineInstanceLabel = LabelBuilderFactoryContext.getLabelBuilderFactory.createLabel(classOf[EngineInstanceLabel])
- engineInstanceLabel.setInstance(node.getServiceInstance.getInstance)
- engineInstanceLabel.setServiceName(node.getServiceInstance.getApplicationName)
- labelResourceService.removeResourceByLabel(engineInstanceLabel)
- }
- labelResourceService.removeResourceByLabel(eMInstanceLabel)
-
} {
//5.Release lock(释放锁)
- resourceLockService.unLock(labelContainer)
+ //resourceLockService.unLock(labelContainer)
+ info(s"ECMResourceClear:${serviceInstance}, usedResource:${Resource.initResource(ResourceType.Default).toJson}")
info(s"finished processing unregistration of ${serviceInstance}")
}
}
@@ -167,24 +197,22 @@
}
/**
- * Request resources and wait for a certain amount of time until the requested resource is met
- * 请求资源,并等待一定的时间,直到满足请求的资源
- *
- * @param labels
- * @param resource
- * @param wait
- * @return
- */
+ * Request resources and wait for a certain amount of time until the requested resource is met
+ * 请求资源,并等待一定的时间,直到满足请求的资源
+ *
+ * @param labels
+ * @param resource
+ * @param wait
+ * @return
+ */
override def requestResource(labels: util.List[Label[_]], resource: NodeResource, wait: Long): ResultResource = {
- val startTime = System.currentTimeMillis
val labelContainer = labelResourceService.enrichLabels(new RMLabelContainer(labels))
-
- info("start processing request resource for labels [" + labelContainer + "] and resource [" + resource + "]")
+ debug("start processing request resource for labels [" + labelContainer + "] and resource [" + resource + "]")
// pre-check without lock
val requestResourceService = getRequestResourceService(resource.getResourceType)
try {
- labelContainer.getLabels.toArray.foreach{ label =>
- labelContainer.setCurrentLabel(label.asInstanceOf[Label[_]])
+ labelContainer.getResourceLabels.foreach{ label =>
+ labelContainer.setCurrentLabel(label)
if (!requestResourceService.canRequest(labelContainer, resource)){
return NotEnoughResource(s"Labels:$labels not enough resource")
}
@@ -195,14 +223,13 @@
Utils.tryFinally {
// lock labels
- labelContainer.getLabels.toArray.foreach {
+ labelContainer.getResourceLabels.foreach {
case label: Label[_] =>
labelContainer.setCurrentLabel(label)
- var locked = resourceLockService.tryLock(labelContainer)
- if(wait <= 0){
- locked = resourceLockService.tryLock(labelContainer)
+ val locked = if(wait <= 0){
+ resourceLockService.tryLock(labelContainer)
} else {
- locked = resourceLockService.tryLock(labelContainer, wait)
+ resourceLockService.tryLock(labelContainer, wait)
}
if (!locked){
return NotEnoughResource("Wait for lock time out")
@@ -220,18 +247,25 @@
resource.setLockedResource(resource.getMinResource)
// lock label resources
- labelContainer.getLabels.toArray.foreach {
+ val labelResourceList = new util.HashMap[String, NodeResource]()
+ labelContainer.getResourceLabels.foreach(label => {
+ val usedResource = labelResourceService.getLabelResource(label)
+ if(usedResource == null){
+ info(s"Resource label: ${label.getStringValue} has no usedResource, please check, refuse request usedResource!" +
+ s"(资源标签:${label.getStringValue}),缺少对应的资源记录,可能是该标签的资源没有初始化,请及时检查,此次申请资源失败!")
+ throw new RMErrorException(110022, s"Resource label: ${label.getStringValue} has no usedResource, please check, refuse request usedResource!" +
+ s"(资源标签:${label.getStringValue}),缺少对应的资源记录,可能是该标签的资源没有初始化,请及时检查,此次申请资源失败!")
+ }
+ labelResourceList.put(label.getStringValue, usedResource)
+ })
+ labelContainer.getResourceLabels.foreach {
case label: Label[_] =>
- var labelResource = labelResourceService.getLabelResource(label)
- if(labelResource == null){
- if(label.equals(labelContainer.getCombinedUserCreatorEngineTypeLabel)){
- labelResource = CommonNodeResource.initNodeResource(resource.getResourceType)
- labelResource.setLockedResource(resource.getMinResource)
- }
- } else {
+ val labelResource = labelResourceList.get(label.getStringValue)
+ if(labelResource != null){
labelResource.setLeftResource(labelResource.getLeftResource - resource.getLockedResource)
labelResource.setLockedResource(labelResource.getLockedResource + resource.getLockedResource)
labelResourceService.setLabelResource(label, labelResource)
+ info(s"ResourceChanged:${label.getStringValue} --> ${labelResource}")
}
case _ =>
}
@@ -260,23 +294,22 @@
RMConfiguration.RM_WAIT_EVENT_TIME_OUT.getValue,
TimeUnit.MILLISECONDS
)
- info(s"labels $labels succeed to request resource:$resource")
AvailableResource(tickedId)
} {
//5.Release lock(释放锁)
resourceLockService.unLock(labelContainer)
- info(s"finished processing requestResource of labels ${labels} and resource ${resource}")
+ debug(s"finished processing requestResource of labels ${labels} and resource ${resource}")
}
}
/**
- * When the resource is instantiated, the total amount of resources actually occupied is returned.
- * 当资源被实例化后,返回实际占用的资源总量
- *
- * @param labels
- * In general, resourceReleased will release the resources occupied by the user, but if the process that uses the resource does not have time to call the resourceReleased method to die, you need to unregister to release the resource.
- * @param usedResource
- */
+ * When the resource is instantiated, the total amount of resources actually occupied is returned.
+ * 当资源被实例化后,返回实际占用的资源总量
+ *
+ * @param labels
+ * In general, resourceReleased will release the resources occupied by the user, but if the process that uses the resource does not have time to call the resourceReleased method to die, you need to unregister to release the resource.
+ * @param usedResource
+ */
override def resourceUsed(labels: util.List[Label[_]], usedResource: NodeResource): Unit = {
val labelContainer = labelResourceService.enrichLabels(new RMLabelContainer(labels))
var lockedResource: NodeResource = null
@@ -287,93 +320,147 @@
error(s"EngineInstanceLabel [${labelContainer.getEngineInstanceLabel}] cause NullPointerException")
throw e
}
- if (lockedResource == null) throw new RMErrorException(RMErrorConstants.RM_ERROR, s"No " +
- s"locked " +
- s"resource found by engine ${labelContainer.getEngineInstanceLabel}")
- if (!lockedResource.getLockedResource.equalsTo(usedResource.getUsedResource)) throw new RMErrorException(110022, s"Locked ${lockedResource.getLockedResource}, but want to use ${usedResource.getUsedResource}")
-
+ if (lockedResource == null) {
+ throw new RMErrorException(110021, s"No locked resource found by engine ${labelContainer.getEngineInstanceLabel}")
+ }
+ /*if (!lockedResource.getLockedResource.equalsTo(usedResource.getUsedResource)) {
+ throw new RMErrorException(110022, s"Locked ${lockedResource.getLockedResource}, but want to use ${usedResource.getUsedResource}")
+ }*/
+ info(s"resourceUsed ready:${labelContainer.getEMInstanceLabel.getServiceInstance}, used resource ${lockedResource.getLockedResource}")
+ val addedResource = Resource.initResource(lockedResource.getResourceType) + lockedResource.getLockedResource
Utils.tryFinally {
// lock labels
- labelContainer.getLabels.toArray.foreach {
+ labelContainer.getResourceLabels.foreach {
case label: Label[_] =>
labelContainer.setCurrentLabel(label.asInstanceOf[Label[_]])
resourceLockService.tryLock(labelContainer)
case _ =>
}
-
- labelContainer.getLabels.toArray.foreach {
+ labelContainer.getResourceLabels.foreach {
case engineInstanceLabel: EngineInstanceLabel =>
- lockedResource.setUsedResource(lockedResource.getLockedResource)
- lockedResource.setLockedResource(Resource.getZeroResource(lockedResource.getLockedResource))
- labelResourceService.setLabelResource(engineInstanceLabel, lockedResource)
+ Utils.tryCatch{
+ lockedResource.setUsedResource(lockedResource.getLockedResource)
+ lockedResource.setLockedResource(Resource.getZeroResource(lockedResource.getLockedResource))
+ labelResourceService.setLabelResource(engineInstanceLabel, lockedResource)
+ resourceLogService.success(ChangeType.ENGINE_INIT, engineInstanceLabel)
+ }{
+ case exception: Exception => {
+ resourceLogService.failed(ChangeType.ENGINE_INIT, engineInstanceLabel, null, exception)
+ throw exception
+ }
+ case _ =>
+ }
case label: Label[_] =>
- val labelResource = labelResourceService.getLabelResource(label)
- if(labelResource != null){
- labelResource.setLockedResource(labelResource.getLockedResource - usedResource.getUsedResource)
- if(null == labelResource.getUsedResource) labelResource.setUsedResource(Resource.initResource(labelResource.getResourceType))
- labelResource.setUsedResource(labelResource.getUsedResource + usedResource.getUsedResource)
- labelResourceService.setLabelResource(label, labelResource)
+ Utils.tryCatch{
+ val labelResource = labelResourceService.getLabelResource(label)
+ if(labelResource != null){
+ labelResource.setLockedResource(labelResource.getLockedResource - addedResource)
+ if(null == labelResource.getUsedResource) labelResource.setUsedResource(Resource.initResource(labelResource.getResourceType))
+ labelResource.setUsedResource(labelResource.getUsedResource + addedResource)
+ labelResourceService.setLabelResource(label, labelResource)
+ if(label.isInstanceOf[EMInstanceLabel]){
+ resourceLogService.success(ChangeType.ECM_RESOURCE_ADD, null, label.asInstanceOf[EMInstanceLabel])
+ }
+ }
+ }{
+ case exception: Exception => {
+ if(label.isInstanceOf[EMInstanceLabel]){
+ resourceLogService.failed(ChangeType.ECM_RESOURCE_ADD, null, label.asInstanceOf[EMInstanceLabel], exception)
+ }
+ }
}
case _ =>
}
}{
resourceLockService.unLock(labelContainer)
- info(s"Labels:$labels resourceInited Processed(处理完毕)")
+ }
+ }
+
+ def timeCheck(labelResource: NodeResource, usedResource: NodeResource) = {
+ if(labelResource.getCreateTime != null && usedResource.getCreateTime != null){
+ if(labelResource.getCreateTime.getTime > usedResource.getCreateTime.getTime){
+ throw new RMErrorException(10022,s"no need to clear this labelResource, labelResource:${labelResource} created time is after than usedResource:${usedResource}" +
+ s"无需清理该标签的资源,该标签资源的创建时间晚于已用资源的创建时间")
+ }
}
}
/**
- * Method called when the resource usage is released
- * 当资源使用完成释放后,调用的方法
- *
- * @param labels
- */
+ * Method called when the resource usage is released
+ * 当资源使用完成释放后,调用的方法
+ *
+ * @param labels
+ */
override def resourceReleased(labels: util.List[Label[_]]): Unit = {
val labelContainer = labelResourceService.enrichLabels(new RMLabelContainer(labels))
val usedResource = labelResourceService.getLabelResource(labelContainer.getEngineInstanceLabel)
- if(usedResource == null) throw new RMErrorException(RMErrorConstants.RM_ERROR, s"No used resource " +
- s"found by engine " +
- s"${labelContainer.getEngineInstanceLabel}")
+ if(usedResource == null){
+ throw new RMErrorException(110021, s"No used resource found by engine ${labelContainer.getEngineInstanceLabel}")
+ }
+ info(s"resourceRelease ready:${labelContainer.getEngineInstanceLabel.getServiceInstance},current node resource${usedResource}")
Utils.tryFinally {
// lock labels
- labelContainer.getLabels.toArray.foreach {
+ labelContainer.getResourceLabels.foreach {
case label: Label[_] =>
labelContainer.setCurrentLabel(label.asInstanceOf[Label[_]])
resourceLockService.tryLock(labelContainer)
case _ =>
}
-
- labelContainer.getLabels.toArray.foreach {
- case engineInstanceLabel: EngineInstanceLabel =>
+ val tmpLabel = labelContainer.getLabels.find(_.isInstanceOf[EngineInstanceLabel]).getOrElse(null)
+ if(tmpLabel != null){
+ val engineInstanceLabel = tmpLabel.asInstanceOf[EngineInstanceLabel]
+ Utils.tryCatch {
labelResourceService.removeResourceByLabel(engineInstanceLabel)
+ resourceLogService.success(ChangeType.ENGINE_CLEAR, engineInstanceLabel,null)
+ }{
+ case exception: Exception => {
+ resourceLogService.failed(ChangeType.ENGINE_CLEAR, engineInstanceLabel, null, exception)
+ throw exception
+ }
+ case _ =>
+ }
+ }
+ labelContainer.getResourceLabels.foreach {
case label: Label[_] =>
- val labelResource = labelResourceService.getLabelResource(label)
- if(labelResource != null){
- if(null != usedResource.getUsedResource){
- labelResource.setUsedResource(labelResource.getUsedResource - usedResource.getUsedResource)
- labelResource.setLeftResource(labelResource.getLeftResource + usedResource.getUsedResource)
+ Utils.tryCatch{
+ val labelResource = labelResourceService.getLabelResource(label)
+ if(labelResource != null){
+ timeCheck(labelResource, usedResource)
+ if(null != usedResource.getUsedResource){
+ labelResource.setUsedResource(labelResource.getUsedResource - usedResource.getUsedResource)
+ labelResource.setLeftResource(labelResource.getLeftResource + usedResource.getUsedResource)
+ }
+ if(null != usedResource.getLockedResource){
+ labelResource.setLockedResource(labelResource.getLockedResource - usedResource.getLockedResource)
+ labelResource.setLeftResource(labelResource.getLeftResource + usedResource.getLockedResource)
+ }
+ labelResourceService.setLabelResource(label, labelResource)
+ if(label.isInstanceOf[EMInstanceLabel]){
+ resourceLogService.success(ChangeType.ECM_Resource_MINUS, null, label.asInstanceOf[EMInstanceLabel])
+ }
}
- if(null != usedResource.getLockedResource){
- labelResource.setLockedResource(labelResource.getLockedResource - usedResource.getLockedResource)
- labelResource.setLeftResource(labelResource.getLeftResource + usedResource.getLockedResource)
+ }{
+ case exception: Exception => {
+ if(label.isInstanceOf[EMInstanceLabel]){
+ resourceLogService.failed(ChangeType.ECM_Resource_MINUS, null, label.asInstanceOf[EMInstanceLabel], exception)
+ }
}
- labelResourceService.setLabelResource(label, labelResource)
+ case _ =>
}
case _ =>
}
} {
resourceLockService.unLock(labelContainer)
- info(s"Labels:$labels resourceReleased Processed(处理完毕)")
}
}
/**
- * If the IP and port are empty, return the resource status of all modules of a module
- * * Return the use of this instance resource if there is an IP and port
- *
- * @param serviceInstances
- * @return
- */
+ * If the IP and port are empty, return the resource status of all modules of a module
+ * * Return the use of this instance resource if there is an IP and port
+ *
+ * @param serviceInstances
+ * @return
+ */
@@ -412,6 +499,10 @@
override def run(): Unit = {
info(s"check locked resource of ${engineInstanceLabel}")
val resource = labelResourceService.getLabelResource(engineInstanceLabel)
+ if(resource == null){
+ info(s"${engineInstanceLabel} has no resource")
+ }
+ info(s"${engineInstanceLabel} has resource")
if(resource != null
&& resource.getLockedResource != null
&& resource.getLockedResource > Resource.getZeroResource(resource.getLockedResource)){
@@ -424,22 +515,41 @@
if(timeoutEMEngineResponse.getCanReleaseResource){
Utils.tryFinally {
// lock labels
- labelContainer.getLabels.toArray.foreach {
+ labelContainer.getResourceLabels.foreach {
case label: Label[_] =>
labelContainer.setCurrentLabel(label.asInstanceOf[Label[_]])
resourceLockService.tryLock(labelContainer)
case _ =>
}
- labelContainer.getLabels.toArray.foreach {
+ labelContainer.getResourceLabels.foreach {
case engineInstanceLabel: EngineInstanceLabel =>
- labelResourceService.removeResourceByLabel(engineInstanceLabel)
- case label: Label[_] =>
- val labelResource = labelResourceService.getLabelResource(label)
- if(labelResource != null){
- labelResource.setLockedResource(labelResource.getLockedResource - resource.getLockedResource)
- labelResource.setLeftResource(labelResource.getLeftResource + resource.getLockedResource)
- labelResourceService.setLabelResource(label, labelResource)
+ Utils.tryCatch{
+ labelResourceService.removeResourceByLabel(engineInstanceLabel)
+ resourceLogService.success(ChangeType.ENGINE_CLEAR, engineInstanceLabel, null)
+ }{
+ case exception: Exception => {
+ resourceLogService.failed(ChangeType.ENGINE_CLEAR,engineInstanceLabel, null, exception)
+ throw exception
+ }
+ case _ =>
+ }
+ case emInstanceLabel: EMInstanceLabel =>
+ Utils.tryCatch{
+ val labelResource = labelResourceService.getLabelResource(emInstanceLabel)
+ if(labelResource != null){
+ labelResource.setLockedResource(labelResource.getLockedResource - resource.getLockedResource)
+ labelResource.setLeftResource(labelResource.getLeftResource + resource.getLockedResource)
+ labelResourceService.setLabelResource(emInstanceLabel, labelResource)
+ resourceLogService.success(ChangeType.ECM_Resource_MINUS, null, emInstanceLabel)
+ }else{
+ resourceLogService.failed(ChangeType.ECM_Resource_MINUS, null, emInstanceLabel)
+ }
+ }{
+ case exception: Exception => {
+ resourceLogService.failed(ChangeType.ECM_Resource_MINUS, null, emInstanceLabel, exception)
+ }
+ case _ =>
}
case _ =>
}
@@ -466,7 +576,7 @@
}
override def resourceReport(labels: util.List[Label[_]], reportResource: NodeResource): Unit = {
- //todo to complete resource report
- return
+ //TODO
+
}
}
diff --git a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/service/impl/LabelResourceServiceImpl.scala b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/service/impl/LabelResourceServiceImpl.scala
index d480d57..bc028c4 100644
--- a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/service/impl/LabelResourceServiceImpl.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/service/impl/LabelResourceServiceImpl.scala
@@ -17,7 +17,7 @@
package com.webank.wedatasphere.linkis.resourcemanager.service.impl
import com.webank.wedatasphere.linkis.common.utils.Logging
-import com.webank.wedatasphere.linkis.manager.common.entity.persistence.PersistenceResource
+import com.webank.wedatasphere.linkis.manager.common.entity.persistence.{PersistenceLabel, PersistenceResource}
import com.webank.wedatasphere.linkis.manager.common.entity.resource.NodeResource
import com.webank.wedatasphere.linkis.manager.common.utils.ResourceUtils
import com.webank.wedatasphere.linkis.manager.label.builder.factory.LabelBuilderFactoryContext
@@ -48,23 +48,11 @@
private val labelFactory = LabelBuilderFactoryContext.getLabelBuilderFactory
override def getLabelResource(label: Label[_]): NodeResource = {
- label match {
-// case combinedLabel: CombinedLabel =>
-// val persistenceLabel = labelManagerPersistence.getLabelByKeyValue(combinedLabel.getLabelKey, combinedLabel.getStringValue)
-// resourceLabelService.getResourceByLabel(persistenceLabel)
- case _ =>
- return resourceLabelService.getResourceByLabel(label)
- }
+ resourceLabelService.getResourceByLabel(label)
}
override def setLabelResource(label: Label[_], nodeResource: NodeResource): Unit = {
- label match {
- case combinedLabel: CombinedLabel =>
- val persistenceLabel = labelManagerPersistence.getLabelByKeyValue(combinedLabel.getLabelKey, combinedLabel.getStringValue)
- resourceLabelService.setResourceToLabel(persistenceLabel, nodeResource)
- case _ =>
- resourceLabelService.setResourceToLabel(label, nodeResource)
- }
+ resourceLabelService.setResourceToLabel(label, nodeResource)
}
override def getResourcesByUser(user: String): Array[NodeResource] = {
@@ -72,7 +60,7 @@
}
override def enrichLabels(labelContainer: RMLabelContainer): RMLabelContainer = {
- return new RMLabelContainer(LabelUtils.distinctLabel(resourceLabelService.getResourceLabels(labelContainer.getLabels), labelContainer.getLabels))
+ new RMLabelContainer(labelContainer.getLabels)
}
override def removeResourceByLabel(label: Label[_]): Unit = {
diff --git a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/service/impl/ResourceLogService.scala b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/service/impl/ResourceLogService.scala
new file mode 100644
index 0000000..c2903e0
--- /dev/null
+++ b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/service/impl/ResourceLogService.scala
@@ -0,0 +1,135 @@
+package com.webank.wedatasphere.linkis.resourcemanager.service.impl
+
+import com.webank.wedatasphere.linkis.common.utils.Logging
+import com.webank.wedatasphere.linkis.manager.common.entity.resource.{Resource, ResourceType}
+import com.webank.wedatasphere.linkis.manager.label.entity.Label
+import com.webank.wedatasphere.linkis.manager.label.entity.em.EMInstanceLabel
+import com.webank.wedatasphere.linkis.manager.label.entity.engine.EngineInstanceLabel
+import com.webank.wedatasphere.linkis.resourcemanager.service.LabelResourceService
+import org.springframework.beans.factory.annotation.Autowired
+import org.springframework.stereotype.Component
+
+@Component
+case class ResourceLogService() extends Logging{
+
+ @Autowired
+ var labelResourceService: LabelResourceService = _
+
+ private def printLog(changeType: String, status: String, engineLabel: EngineInstanceLabel = null, ecmLabel: EMInstanceLabel = null) :String = {
+ val logString = new StringBuilder(changeType + " ")
+ logString ++= (status + ", ")
+ if (engineLabel != null) {
+ val engineResource = labelResourceService.getLabelResource(engineLabel)
+ var usedResource = Resource.initResource(ResourceType.Default)
+ if (engineResource != null && engineResource.getUsedResource != null) {
+ usedResource = engineResource.getUsedResource
+ }
+ logString ++= ("engine current resource:")
+ logString ++= (engineLabel.getServiceInstance.getInstance)
+ logString ++= (usedResource.toJson + " ")
+ }
+ if (ecmLabel != null) {
+ val ecmResource = labelResourceService.getLabelResource(ecmLabel)
+ var usedResource = Resource.initResource(ResourceType.Default)
+ if (ecmResource != null && ecmResource.getUsedResource != null) {
+ usedResource = ecmResource.getUsedResource
+ }
+ logString ++= ("ecm current resource:")
+ logString ++= (ecmLabel.getServiceInstance.getInstance)
+ logString ++= (usedResource.toJson + " ")
+ }
+ logString.toString()
+ }
+
+ def failed(changeType: String, engineLabel: EngineInstanceLabel = null, ecmLabel: EMInstanceLabel = null, exception: Exception = null): Unit = {
+ if (changeType != null) {
+ val log: String = changeType match {
+ case ChangeType.ENGINE_INIT => {
+ printLog(changeType, ChangeType.FAILED, engineLabel, ecmLabel)
+ }
+ case ChangeType.ENGINE_CLEAR => {
+ printLog(changeType, ChangeType.FAILED, engineLabel, ecmLabel)
+ }
+ case ChangeType.ENGINE_RESOURCE_ADD => {
+ printLog(changeType, ChangeType.FAILED, engineLabel, ecmLabel)
+ }
+ case ChangeType.ENGINE_RESOURCE_MINUS => {
+ printLog(changeType, ChangeType.FAILED, engineLabel, ecmLabel)
+ }
+ case ChangeType.ECM_INIT => {
+ printLog(changeType, ChangeType.FAILED, null, ecmLabel)
+ }
+ case ChangeType.ECM_CLEAR => {
+ printLog(changeType, ChangeType.FAILED, null, ecmLabel)
+ }
+ case ChangeType.ECM_RESOURCE_ADD => {
+ printLog(changeType, ChangeType.FAILED, engineLabel, ecmLabel)
+ }
+ case ChangeType.ECM_Resource_MINUS => {
+ printLog(changeType, ChangeType.FAILED, engineLabel, ecmLabel)
+ }
+ case _ => " "
+ }
+ if(exception != null){
+ error(log, exception)
+ }else{
+ error(log)
+ }
+ }
+ }
+ def success(changeType: String, engineLabel: EngineInstanceLabel = null, ecmLabel: EMInstanceLabel = null): Unit = {
+ if (changeType != null) {
+ val log: String = changeType match {
+ case ChangeType.ENGINE_INIT => {
+ printLog(changeType, ChangeType.SUCCESS, engineLabel, ecmLabel)
+ }
+ case ChangeType.ENGINE_CLEAR => {
+ printLog(changeType, ChangeType.SUCCESS, engineLabel, ecmLabel)
+ }
+ case ChangeType.ENGINE_RESOURCE_ADD => {
+ printLog(changeType, ChangeType.SUCCESS, engineLabel, ecmLabel)
+ }
+ case ChangeType.ENGINE_RESOURCE_MINUS => {
+ printLog(changeType, ChangeType.SUCCESS, engineLabel, ecmLabel)
+ }
+ case ChangeType.ECM_INIT => {
+ printLog(changeType, ChangeType.SUCCESS, null, ecmLabel)
+ }
+ case ChangeType.ECM_CLEAR => {
+ printLog(changeType, ChangeType.SUCCESS, null, ecmLabel)
+ }
+ case ChangeType.ECM_RESOURCE_ADD => {
+ printLog(changeType, ChangeType.SUCCESS, engineLabel, ecmLabel)
+ }
+ case ChangeType.ECM_Resource_MINUS => {
+ printLog(changeType, ChangeType.SUCCESS, engineLabel, ecmLabel)
+ }
+ case _ =>" "
+ }
+ info(log)
+ }
+ }
+}
+
+object ChangeType {
+
+ val ENGINE_INIT = "EngineResourceInit"
+
+ val ENGINE_CLEAR = "EngineResourceClear"
+
+ val ENGINE_RESOURCE_ADD = "EngineResourceAdd"
+
+ val ENGINE_RESOURCE_MINUS = "EngineResourceMinus"
+
+ val ECM_INIT = "ECMResourceInit"
+
+ val ECM_CLEAR = "ECMResourceClear"
+
+ val ECM_RESOURCE_ADD = "ECMResourceAdd"
+
+ val ECM_Resource_MINUS = "ECMResourceMinus"
+
+ val SUCCESS = "success"
+
+ val FAILED = "failed"
+}
\ No newline at end of file
diff --git a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/utils/RMUtils.scala b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/utils/RMUtils.scala
index 88171be..427a23f 100644
--- a/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/utils/RMUtils.scala
+++ b/linkis-computation-governance/linkis-manager/linkis-resource-manager/src/main/scala/com/webank/wedatasphere/linkis/resourcemanager/utils/RMUtils.scala
@@ -29,6 +29,7 @@
val MANAGER_KILL_ENGINE_EAIT = CommonVars("wds.linkis.manager.rm.kill.engine.wait", new TimeType("30s"))
+ val RM_REQUEST_ENABLE = CommonVars("wds.linkis.manager.rm.request.enable", true)
def deserializeResource(plainResource: String): Resource = {
read[Resource](plainResource)
@@ -49,16 +50,16 @@
persistenceResource
}
-/* def fromPersistenceResource(persistenceResource: PersistenceResource) : CommonNodeResource = {
- val nodeResource = new CommonNodeResource
- if(persistenceResource.getMaxResource != null) nodeResource.setMaxResource(deserializeResource(persistenceResource.getMaxResource))
- if(persistenceResource.getMinResource != null) nodeResource.setMinResource(deserializeResource(persistenceResource.getMinResource))
- if(persistenceResource.getLockedResource != null) nodeResource.setLockedResource(deserializeResource(persistenceResource.getLockedResource))
- if(persistenceResource.getExpectedResource != null) nodeResource.setMaxResource(deserializeResource(persistenceResource.getExpectedResource))
- if(persistenceResource.getLeftResource != null) nodeResource.setLeftResource(deserializeResource(persistenceResource.getLeftResource))
- nodeResource.setResourceType(ResourceType.valueOf(persistenceResource.getResourceType))
- nodeResource
- }*/
+ /* def fromPersistenceResource(persistenceResource: PersistenceResource) : CommonNodeResource = {
+ val nodeResource = new CommonNodeResource
+ if(persistenceResource.getMaxResource != null) nodeResource.setMaxResource(deserializeResource(persistenceResource.getMaxResource))
+ if(persistenceResource.getMinResource != null) nodeResource.setMinResource(deserializeResource(persistenceResource.getMinResource))
+ if(persistenceResource.getLockedResource != null) nodeResource.setLockedResource(deserializeResource(persistenceResource.getLockedResource))
+ if(persistenceResource.getExpectedResource != null) nodeResource.setMaxResource(deserializeResource(persistenceResource.getExpectedResource))
+ if(persistenceResource.getLeftResource != null) nodeResource.setLeftResource(deserializeResource(persistenceResource.getLeftResource))
+ nodeResource.setResourceType(ResourceType.valueOf(persistenceResource.getResourceType))
+ nodeResource
+ }*/
def aggregateNodeResource(firstNodeResource: NodeResource, secondNodeResource: NodeResource) : CommonNodeResource = {
if(firstNodeResource != null && secondNodeResource != null){
diff --git a/linkis-computation-governance/linkis-manager/pom.xml b/linkis-computation-governance/linkis-manager/pom.xml
index 28b1467..dc96039 100644
--- a/linkis-computation-governance/linkis-manager/pom.xml
+++ b/linkis-computation-governance/linkis-manager/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-computation-governance/pom.xml b/linkis-computation-governance/pom.xml
index bdce44e..5d3290d 100644
--- a/linkis-computation-governance/pom.xml
+++ b/linkis-computation-governance/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-engineconn-plugins/engineconn-plugins/hive/pom.xml b/linkis-engineconn-plugins/engineconn-plugins/hive/pom.xml
index 72c2527..a85b048 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/hive/pom.xml
+++ b/linkis-engineconn-plugins/engineconn-plugins/hive/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
@@ -29,7 +29,7 @@
<artifactId>linkis-engineplugin-hive</artifactId>
<properties>
- <hive.version>1.2.1</hive.version>
+ <hive.version>2.3.3</hive.version>
</properties>
<dependencies>
@@ -267,6 +267,17 @@
</exclusions>
</dependency>
+ <dependency>
+ <groupId>org.apache.httpcomponents</groupId>
+ <artifactId>httpclient</artifactId>
+ <version>${httpclient.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.httpcomponents</groupId>
+ <artifactId>httpmime</artifactId>
+ <version>${httpmime.version}</version>
+ </dependency>
+
</dependencies>
<build>
diff --git a/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/java/com/webank/wedatasphere/linkis/engineplugin/hive/serde/CustomerDelimitedJSONSerDe.java b/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/java/com/webank/wedatasphere/linkis/engineplugin/hive/serde/CustomerDelimitedJSONSerDe.java
new file mode 100644
index 0000000..fcb132b
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/java/com/webank/wedatasphere/linkis/engineplugin/hive/serde/CustomerDelimitedJSONSerDe.java
@@ -0,0 +1,290 @@
+/*
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.linkis.engineplugin.hive.serde;
+
+
+import org.apache.commons.codec.binary.Base64;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.ByteStream;
+import org.apache.hadoop.hive.serde2.SerDeException;
+import org.apache.hadoop.hive.serde2.SerDeUtils;
+import org.apache.hadoop.hive.serde2.io.*;
+import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
+import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
+import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
+import org.apache.hadoop.hive.serde2.objectinspector.*;
+import org.apache.hadoop.hive.serde2.objectinspector.primitive.*;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+public class CustomerDelimitedJSONSerDe extends LazySimpleSerDe {
+ public static final Logger LOG = LoggerFactory.getLogger(CustomerDelimitedJSONSerDe.class.getName());
+ public static byte[] trueBytes = {(byte) 't', 'r', 'u', 'e'};
+ public static byte[] falseBytes = {(byte) 'f', 'a', 'l', 's', 'e'};
+ public CustomerDelimitedJSONSerDe() throws SerDeException {
+ }
+
+ /**
+ * Not implemented.
+ */
+ @Override
+ public Object doDeserialize(Writable field) throws SerDeException {
+ LOG.error("DelimitedJSONSerDe cannot deserialize.");
+ throw new SerDeException("DelimitedJSONSerDe cannot deserialize.");
+ }
+
+ @Override
+ protected void serializeField(ByteStream.Output out, Object obj, ObjectInspector objInspector,
+ LazySerDeParameters serdeParams) throws SerDeException {
+ if (!objInspector.getCategory().equals(ObjectInspector.Category.PRIMITIVE) || (objInspector.getTypeName().equalsIgnoreCase(serdeConstants.BINARY_TYPE_NAME))) {
+ //do this for all complex types and binary
+ try {
+ serialize(out, SerDeUtils.getJSONString(obj, objInspector, serdeParams.getNullSequence().toString()),
+ PrimitiveObjectInspectorFactory.javaStringObjectInspector, serdeParams.getSeparators(),
+ 1, serdeParams.getNullSequence(), serdeParams.isEscaped(), serdeParams.getEscapeChar(),
+ serdeParams.getNeedsEscape());
+
+ } catch (IOException e) {
+ throw new SerDeException(e);
+ }
+
+ } else {
+ //primitives except binary
+ try {
+ serialize(out, obj, objInspector, serdeParams.getSeparators(), 1, serdeParams.getNullSequence(), serdeParams.isEscaped(), serdeParams.getEscapeChar(), serdeParams.getNeedsEscape());
+ } catch (IOException e) {
+ throw new SerDeException(e);
+ }
+ }
+ }
+
+
+ public static void serialize(ByteStream.Output out, Object obj, ObjectInspector objInspector, byte[] separators, int level, Text nullSequence, boolean escaped, byte escapeChar, boolean[] needsEscape) throws IOException, SerDeException {
+ if (obj == null) {
+ out.write(nullSequence.getBytes(), 0, nullSequence.getLength());
+ } else {
+ char separator;
+ List list;
+ switch(objInspector.getCategory()) {
+ case PRIMITIVE:
+ writePrimitiveUTF8(out, obj, (PrimitiveObjectInspector)objInspector, escaped, escapeChar, needsEscape);
+ return;
+ case LIST:
+ separator = (char)getSeparator(separators, level);
+ ListObjectInspector loi = (ListObjectInspector)objInspector;
+ list = loi.getList(obj);
+ ObjectInspector eoi = loi.getListElementObjectInspector();
+ if (list == null) {
+ out.write(nullSequence.getBytes(), 0, nullSequence.getLength());
+ } else {
+ for(int i = 0; i < list.size(); ++i) {
+ if (i > 0) {
+ out.write(separator);
+ }
+
+ serialize(out, list.get(i), eoi, separators, level + 1, nullSequence, escaped, escapeChar, needsEscape);
+ }
+ }
+
+ return;
+ case MAP:
+ separator = (char)getSeparator(separators, level);
+ char keyValueSeparator = (char)getSeparator(separators, level + 1);
+ MapObjectInspector moi = (MapObjectInspector)objInspector;
+ ObjectInspector koi = moi.getMapKeyObjectInspector();
+ ObjectInspector voi = moi.getMapValueObjectInspector();
+ Map<?, ?> map = moi.getMap(obj);
+ if (map == null) {
+ out.write(nullSequence.getBytes(), 0, nullSequence.getLength());
+ } else {
+ boolean first = true;
+ Iterator var24 = map.entrySet().iterator();
+
+ while(var24.hasNext()) {
+ Map.Entry<?, ?> entry = (Map.Entry)var24.next();
+ if (first) {
+ first = false;
+ } else {
+ out.write(separator);
+ }
+
+ serialize(out, entry.getKey(), koi, separators, level + 2, nullSequence, escaped, escapeChar, needsEscape);
+ out.write(keyValueSeparator);
+ serialize(out, entry.getValue(), voi, separators, level + 2, nullSequence, escaped, escapeChar, needsEscape);
+ }
+ }
+
+ return;
+ case STRUCT:
+ separator = (char)getSeparator(separators, level);
+ StructObjectInspector soi = (StructObjectInspector)objInspector;
+ List<? extends StructField> fields = soi.getAllStructFieldRefs();
+ list = soi.getStructFieldsDataAsList(obj);
+ if (list == null) {
+ out.write(nullSequence.getBytes(), 0, nullSequence.getLength());
+ } else {
+ for(int i = 0; i < list.size(); ++i) {
+ if (i > 0) {
+ out.write(separator);
+ }
+
+ serialize(out, list.get(i), ((StructField)fields.get(i)).getFieldObjectInspector(), separators, level + 1, nullSequence, escaped, escapeChar, needsEscape);
+ }
+ }
+
+ return;
+ case UNION:
+ separator = (char)getSeparator(separators, level);
+ UnionObjectInspector uoi = (UnionObjectInspector)objInspector;
+ List<? extends ObjectInspector> ois = uoi.getObjectInspectors();
+ if (ois == null) {
+ out.write(nullSequence.getBytes(), 0, nullSequence.getLength());
+ } else {
+ LazyUtils.writePrimitiveUTF8(out, new Byte(uoi.getTag(obj)), PrimitiveObjectInspectorFactory.javaByteObjectInspector, escaped, escapeChar, needsEscape);
+ out.write(separator);
+ serialize(out, uoi.getField(obj), (ObjectInspector)ois.get(uoi.getTag(obj)), separators, level + 1, nullSequence, escaped, escapeChar, needsEscape);
+ }
+
+ return;
+ default:
+ throw new RuntimeException("Unknown category type: " + objInspector.getCategory());
+ }
+ }
+ }
+ private static void writePrimitiveUTF8(OutputStream out, Object o,
+ PrimitiveObjectInspector oi, boolean escaped, byte escapeChar,
+ boolean[] needsEscape) throws IOException {
+
+ PrimitiveObjectInspector.PrimitiveCategory category = oi.getPrimitiveCategory();
+ byte[] binaryData = null;
+ switch (category) {
+ case BOOLEAN: {
+ boolean b = ((BooleanObjectInspector) oi).get(o);
+ if (b) {
+ binaryData = Base64.encodeBase64(trueBytes);
+ } else {
+ binaryData = Base64.encodeBase64(falseBytes);
+ }
+ break;
+ }
+ case BYTE: {
+ binaryData = Base64.encodeBase64(String.valueOf(((ByteObjectInspector) oi).get(o)).getBytes());
+ break;
+ }
+ case SHORT: {
+ binaryData = Base64.encodeBase64(String.valueOf(((ShortObjectInspector) oi).get(o)).getBytes());
+ break;
+ }
+ case INT: {
+ binaryData = Base64.encodeBase64(String.valueOf(((IntObjectInspector) oi).get(o)).getBytes());
+ break;
+ }
+ case LONG: {
+ binaryData = Base64.encodeBase64(String.valueOf(((LongObjectInspector) oi).get(o)).getBytes());
+ break;
+ }
+ case FLOAT: {
+ binaryData = Base64.encodeBase64(String.valueOf(((FloatObjectInspector) oi).get(o)).getBytes());
+ break;
+ }
+ case DOUBLE: {
+ binaryData = Base64.encodeBase64(String.valueOf(((DoubleObjectInspector) oi).get(o)).getBytes());
+ break;
+ }
+ case STRING: {
+ binaryData = Base64.encodeBase64(((StringObjectInspector) oi).getPrimitiveWritableObject(o).getBytes());
+ break;
+ }
+ case CHAR: {
+ HiveCharWritable hc = ((HiveCharObjectInspector) oi).getPrimitiveWritableObject(o);
+ binaryData = Base64.encodeBase64(String.valueOf(hc).getBytes());
+ break;
+ }
+ case VARCHAR: {
+ HiveVarcharWritable hc = ((HiveVarcharObjectInspector)oi).getPrimitiveWritableObject(o);
+ binaryData = Base64.encodeBase64(String.valueOf(hc).getBytes());
+ break;
+ }
+ case BINARY: {
+ BytesWritable bw = ((BinaryObjectInspector) oi).getPrimitiveWritableObject(o);
+ byte[] toEncode = new byte[bw.getLength()];
+ System.arraycopy(bw.getBytes(), 0,toEncode, 0, bw.getLength());
+ byte[] toWrite = Base64.encodeBase64(toEncode);
+ out.write(toWrite, 0, toWrite.length);
+ break;
+ }
+ case DATE: {
+ DateWritable dw = ((DateObjectInspector) oi).getPrimitiveWritableObject(o);
+ binaryData = Base64.encodeBase64(String.valueOf(dw).getBytes());
+ break;
+ }
+ case TIMESTAMP: {
+ TimestampWritable tw = ((TimestampObjectInspector) oi).getPrimitiveWritableObject(o);
+ binaryData = Base64.encodeBase64(String.valueOf(tw).getBytes());
+ break;
+ }
+ case INTERVAL_YEAR_MONTH: {
+ HiveIntervalYearMonthWritable hw = ((HiveIntervalYearMonthObjectInspector) oi).getPrimitiveWritableObject(o);
+ binaryData = Base64.encodeBase64(String.valueOf(hw).getBytes());
+ break;
+ }
+ case INTERVAL_DAY_TIME: {
+ HiveIntervalDayTimeWritable ht = ((HiveIntervalDayTimeObjectInspector) oi).getPrimitiveWritableObject(o);
+ binaryData = Base64.encodeBase64(String.valueOf(ht).getBytes());
+ break;
+ }
+ case DECIMAL: {
+ HiveDecimalObjectInspector decimalOI = (HiveDecimalObjectInspector) oi;
+ binaryData = Base64.encodeBase64(String.valueOf(decimalOI).getBytes());
+ break;
+ }
+ default: {
+ throw new RuntimeException("Unknown primitive type: " + category);
+ }
+ }
+ if(binaryData == null){
+ throw new RuntimeException("get primitive type is null: " + category);
+ }
+ out.write(binaryData,0,binaryData.length);
+ }
+
+ static byte getSeparator(byte[] separators, int level) throws SerDeException {
+ try {
+ return separators[level];
+ } catch (ArrayIndexOutOfBoundsException var5) {
+ String msg = "Number of levels of nesting supported for LazySimpleSerde is " + (separators.length - 1) + " Unable to work with level " + level;
+ String txt = ". Use %s serde property for tables using LazySimpleSerde.";
+ if (separators.length < 9) {
+ msg = msg + String.format(txt, "hive.serialization.extend.nesting.levels");
+ } else if (separators.length < 25) {
+ msg = msg + String.format(txt, "hive.serialization.extend.additional.nesting.levels");
+ }
+
+ throw new SerDeException(msg, var5);
+ }
+ }
+}
+
diff --git a/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/resources/linkis-engineconn.properties b/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/resources/linkis-engineconn.properties
index 09d77ed..7b06ef9 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/resources/linkis-engineconn.properties
+++ b/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/resources/linkis-engineconn.properties
@@ -24,4 +24,6 @@
wds.linkis.engineconn.plugin.default.class=com.webank.wedatasphere.linkis.engineplugin.hive.HiveEngineConnPlugin
+wds.linkis.bdp.hive.init.sql.enable=true
+
wds.linkis.engine.connector.hooks=com.webank.wedatasphere.linkis.engineconn.computation.executor.hook.ComputationEngineConnHook,com.webank.wedatasphere.linkis.engineconn.computation.executor.hook.JarUdfEngineHook
\ No newline at end of file
diff --git a/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/resources/log4j2-engineconn.xml b/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/resources/log4j2-engineconn.xml
index 6343fa3..c52a166 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/resources/log4j2-engineconn.xml
+++ b/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/resources/log4j2-engineconn.xml
@@ -21,14 +21,23 @@
<PatternLayout pattern="%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%t] %logger{36} %L %M - %msg%xEx%n"/>
</Console>
<Send name="Send" >
+ <Filters>
+ <ThresholdFilter level="WARN" onMatch="ACCEPT" onMismatch="DENY" />
+ </Filters>
<PatternLayout pattern="%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%t] %logger{36} %L %M - %msg%xEx%n"/>
</Send>
+ <File name="stderr" fileName="${env:PWD}/logs/stderr" append="true">
+ <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %class{36} %L %M - %msg%xEx%n"/>
+ </File>
</appenders>
<loggers>
<root level="INFO">
<appender-ref ref="Console"/>
<appender-ref ref="Send"/>
</root>
+ <logger name="org.springframework.boot.diagnostics.LoggingFailureAnalysisReporter " level="error" additivity="true">
+ <appender-ref ref="stderr"/>
+ </logger>
<logger name="com.netflix.discovery" level="warn" additivity="true">
<appender-ref ref="Send"/>
</logger>
diff --git a/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/hive/conf/HiveEngineConfiguration.scala b/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/hive/conf/HiveEngineConfiguration.scala
index 644ccb4..2d4263a 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/hive/conf/HiveEngineConfiguration.scala
+++ b/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/hive/conf/HiveEngineConfiguration.scala
@@ -21,5 +21,6 @@
object HiveEngineConfiguration {
val HIVE_LIB_HOME = CommonVars[String]("hive.lib", CommonVars[String]("HIVE_LIB", "/appcom/Install/hive/lib").getValue)
-
-}
\ No newline at end of file
+ val ENABLE_FETCH_BASE64 = CommonVars[Boolean]("wds.linkis.hive.enable.fetch.base64",true).getValue
+ val BASE64_SERDE_CLASS = CommonVars[String]("wds.linkis.hive.base64.serde.class","com.webank.wedatasphere.linkis.engineplugin.hive.serde.CustomerDelimitedJSONSerDe").getValue
+}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/hive/creation/HiveEngineConnFactory.scala b/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/hive/creation/HiveEngineConnFactory.scala
index 7dec7e6..c2ec03c 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/hive/creation/HiveEngineConnFactory.scala
+++ b/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/hive/creation/HiveEngineConnFactory.scala
@@ -59,11 +59,17 @@
hiveConf.setVar(HiveConf.ConfVars.HIVEJAR, HiveUtils.jarOfClass(classOf[Driver])
.getOrElse(throw HiveSessionStartFailedException(40012, "cannot find hive-exec.jar, start session failed!")))
options.foreach { case (k, v) => info(s"key is $k, value is $v") }
- options.filter { case (k, v) => k.startsWith("hive.") || k.startsWith("mapreduce.") || k.startsWith("wds.linkis.") }.foreach { case (k, v) =>
+ options.filter { case (k, v) => k.startsWith("hive.") || k.startsWith("mapreduce.") || k.startsWith("mapred.reduce.") || k.startsWith("wds.linkis.") }.foreach { case (k, v) =>
info(s"key is $k, value is $v")
if (BDP_QUEUE_NAME.equals(k)) hiveConf.set(HIVE_QUEUE_NAME, v) else hiveConf.set(k, v)
}
hiveConf.setVar(HiveConf.ConfVars.HIVE_HADOOP_CLASSPATH, HiveEngineConfiguration.HIVE_LIB_HOME.getValue + "/*")
+ if(HiveEngineConfiguration.ENABLE_FETCH_BASE64){
+ hiveConf.setVar(HiveConf.ConfVars.HIVEFETCHOUTPUTSERDE,HiveEngineConfiguration.BASE64_SERDE_CLASS)
+ hiveConf.set("enable_fetch_base64","true")
+ }else{
+ hiveConf.set("enable_fetch_base64","false")
+ }
/* //Update by peaceWong add hook to HiveDriver
if (StringUtils.isNotBlank(EnvConfiguration.LINKIS_HIVE_POST_HOOKS)) {
val hooks = if (StringUtils.isNotBlank(hiveConf.get("hive.exec.post.hooks"))) {
diff --git a/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala b/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala
index 4477f88..76d44c1 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala
+++ b/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/hive/executor/HiveEngineConnExecutor.scala
@@ -13,11 +13,6 @@
package com.webank.wedatasphere.linkis.engineplugin.hive.executor
-import java.io.ByteArrayOutputStream
-import java.security.PrivilegedExceptionAction
-import java.util
-import java.util.concurrent.atomic.AtomicBoolean
-
import com.webank.wedatasphere.linkis.common.exception.ErrorException
import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
import com.webank.wedatasphere.linkis.engineconn.computation.executor.execute.{ComputationExecutor, EngineExecutionContext}
@@ -35,7 +30,9 @@
import com.webank.wedatasphere.linkis.storage.resultset.ResultSetFactory
import com.webank.wedatasphere.linkis.storage.resultset.table.{TableMetaData, TableRecord}
import org.apache.commons.io.IOUtils
+import org.apache.hadoop.hive.common.HiveInterruptUtils
import org.apache.hadoop.hive.conf.HiveConf
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars
import org.apache.hadoop.hive.metastore.api.{FieldSchema, Schema}
import org.apache.hadoop.hive.ql.Driver
import org.apache.hadoop.hive.ql.exec.mr.HadoopJobExecHelper
@@ -45,7 +42,12 @@
import org.apache.hadoop.security.UserGroupInformation
import org.slf4j.LoggerFactory
+import java.io.ByteArrayOutputStream
+import java.security.PrivilegedExceptionAction
+import java.util
+import java.util.concurrent.atomic.AtomicBoolean
import scala.collection.JavaConversions._
+import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
class HiveEngineConnExecutor(id: Int,
@@ -60,49 +62,58 @@
private var proc: CommandProcessor = _
- private var map:Int = 0
+ private var map: Int = 0
- private var reduce:Int = 0
+ private var reduce: Int = 0
private val totalTask = 200.0f
- private var singleLineProgress:Float = 0.0f
+ private var singleLineProgress: Float = 0.0f
- private var stage:Int = 0
+ private var stage: Int = 0
private var engineExecutorContext: EngineExecutionContext = _
private val singleCodeCompleted: AtomicBoolean = new AtomicBoolean(false)
- private var numberOfMRJobs:Int = 0
+ private var numberOfMRJobs: Int = 0
- private var currentSqlProgress:Float = 0.0f
+ private var currentSqlProgress: Float = 0.0f
- private val singleSqlProgressMap:util.Map[String, Float] = new util.HashMap[String, Float]()
+ private val singleSqlProgressMap: util.Map[String, Float] = new util.HashMap[String, Float]()
private val executorLabels: util.List[Label[_]] = new util.ArrayList[Label[_]]()
- override def init():Unit = {
+ private var driver: HiveDriverProxy = _
+
+ private var thread: Thread = _
+
+ override def init(): Unit = {
LOG.info(s"Ready to change engine state!")
setCodeParser(new SQLCodeParser)
super.init()
}
override def executeLine(engineExecutorContext: EngineExecutionContext, code: String): ExecuteResponse = {
+ this.engineExecutorContext = engineExecutorContext
+ if (engineExecutorContext.getEnableResultsetMetaWithTableName) {
+ hiveConf.setBoolVar(ConfVars.HIVE_RESULTSET_USE_UNIQUE_COLUMN_NAMES, true)
+ info("set HIVE_RESULTSET_USE_UNIQUE_COLUMN_NAMES true")
+ } else {
+ hiveConf.setBoolVar(ConfVars.HIVE_RESULTSET_USE_UNIQUE_COLUMN_NAMES, false)
+ }
CSHiveHelper.setContextIDInfoToHiveConf(engineExecutorContext, hiveConf)
-
- onComplete()
singleSqlProgressMap.clear()
singleCodeCompleted.set(false)
currentSqlProgress = 0.0f
- var realCode = code.trim()
+ val realCode = code.trim()
LOG.info(s"hive client begins to run hql code:\n ${realCode.trim}")
if (realCode.trim.length > 500) {
engineExecutorContext.appendStdout(s"$getId >> ${realCode.trim.substring(0, 500)} ...")
} else engineExecutorContext.appendStdout(s"$getId >> ${realCode.trim}")
val tokens = realCode.trim.split("""\s+""")
SessionState.setCurrentSessionState(sessionState)
- this.engineExecutorContext = engineExecutorContext
+
val proc = CommandProcessorFactory.get(tokens, hiveConf)
this.proc = proc
LOG.debug("ugi is " + ugi.getUserName)
@@ -110,125 +121,16 @@
override def run(): ExecuteResponse = {
proc match {
case any if HiveDriverProxy.isDriver(any) =>
- import scala.collection.JavaConversions._
- val driver = new HiveDriverProxy(any)
- var needRetry: Boolean = true
- var tryCount: Int = 0
- var hasResult: Boolean = false
- var rows: Int = 0
- var columnCount: Int = 0
- while (needRetry) {
- needRetry = false
- driver.setTryCount(tryCount + 1)
- val startTime = System.currentTimeMillis()
- try{
- if (numberOfMRJobs > 0) engineExecutorContext.appendStdout(s"Your hive sql has $numberOfMRJobs MR jobs to do")
- val hiveResponse: CommandProcessorResponse = driver.run(realCode)
- if (hiveResponse.getResponseCode != 0) {
- LOG.error("Hive query failed, response code is {}", hiveResponse.getResponseCode)
- return ErrorExecuteResponse(hiveResponse.getErrorMessage, hiveResponse.getException)
- }
- engineExecutorContext.appendStdout(s"Time taken: ${Utils.msDurationToString(System.currentTimeMillis() - startTime)}, begin to fetch results.")
- LOG.info(s"$getId >> Time taken: ${Utils.msDurationToString(System.currentTimeMillis() - startTime)}, begin to fetch results.")
- val fieldSchemas = if (hiveResponse.getSchema != null) hiveResponse.getSchema.getFieldSchemas
- else if (driver.getSchema != null) driver.getSchema.getFieldSchemas
- else throw HiveQueryFailedException(41005, "cannot get the field schemas.")
- LOG.debug("fieldSchemas are " + fieldSchemas)
- if (fieldSchemas == null || isNoResultSql(realCode)) {
- //IOUtils.closeQuietly(resultSetWriter)
- numberOfMRJobs = -1
- singleCodeCompleted.set(true)
- onComplete()
- singleSqlProgressMap.clear()
- return SuccessExecuteResponse()
- }
- import scala.collection.mutable
- var results:util.List[FieldSchema] = null
- val nameSet = new mutable.HashSet[String]()
- val cleanSchema = new util.ArrayList[FieldSchema]()
- fieldSchemas foreach {
- fieldSchema => val name = fieldSchema.getName
- if (name.split('.').length == 2){
- nameSet.add(name.split('.')(1))
- cleanSchema += new FieldSchema(name.split('.')(1), fieldSchema.getType, fieldSchema.getComment)
- }
- }
- if(nameSet.size < fieldSchemas.length){
- results = fieldSchemas
- } else {
- results = cleanSchema
- }
- hasResult = true
- val columns = results.map(result => Column(result.getName,
- DataType.toDataType(result.getType.toLowerCase()), result.getComment)).toArray[Column]
- val metaData = new TableMetaData(columns)
- val resultSetWriter = engineExecutorContext.createResultSetWriter(ResultSetFactory.TABLE_TYPE)
- resultSetWriter.addMetaData(metaData)
- val result = new util.ArrayList[String]()
- while(driver.getResults(result)){
- val scalaResult:scala.collection.mutable.Buffer[String] = result
- scalaResult foreach { s =>
- val arr:Array[String] = s.split("\t")
- val arrAny:ArrayBuffer[Any] = new ArrayBuffer[Any]()
- if (arr.length > columns.length){
- logger.error(s"Tab characters are present in the results of your query Hive cannot cut, use Spark to do so (hive code ${realCode} 查询的结果中有\t制表符,hive不能进行切割,请使用spark执行)")
- throw new ErrorException(60078, "Tab characters are present in the results of your query Hive cannot cut, use Spark to do so(您查询的结果中有\t制表符,hive不能进行切割,请使用spark执行)")
- }
- if (arr.length == columns.length) arr foreach arrAny.add
- else if(arr.length == 0) for(i <-1 to columns.length) arrAny add ""
- else {
- val i = columns.length - arr.length
- arr foreach arrAny.add
- for (i <- 1 to i) arrAny add ""
- }
- resultSetWriter.addRecord(new TableRecord(arrAny.toArray))
- }
- rows += result.size
- result.clear()
- rows += result.size
- result.clear()
- }
- columnCount = if (fieldSchemas != null) fieldSchemas.size() else 0
- engineExecutorContext.sendResultSet(resultSetWriter)
- IOUtils.closeQuietly(resultSetWriter)
- }catch{
- case e if HiveDriverProxy.isCommandNeedRetryException(e) => tryCount += 1
- needRetry = true
- HiveProgressHelper.clearHiveProgress()
- onComplete()
- singleSqlProgressMap.clear()
- clearCurrentProgress()
- HiveProgressHelper.storeSingleSQLProgress(0.0f)
- LOG.warn("Retry hive query with a different approach...")
- case t: Throwable => LOG.error(s"query failed, reason : ", t)
- HiveProgressHelper.clearHiveProgress()
- clearCurrentProgress()
- HiveProgressHelper.storeSingleSQLProgress(0.0f)
- singleCodeCompleted.set(true)
- numberOfMRJobs = -1
- onComplete()
- singleSqlProgressMap.clear()
- return ErrorExecuteResponse(t.getMessage, t)
- }
- }
- if (hasResult) {
- engineExecutorContext.appendStdout(s"Fetched $columnCount col(s) : $rows row(s) in hive")
- LOG.info(s"$getId >> Fetched $columnCount col(s) : $rows row(s) in hive")
- }
- clearCurrentProgress()
- HiveProgressHelper.clearHiveProgress()
- HiveProgressHelper.storeSingleSQLProgress(0.0f)
- singleCodeCompleted.set(true)
- numberOfMRJobs = -1
- onComplete()
- singleSqlProgressMap.clear()
- SuccessExecuteResponse()
- case _ => val resp = proc.run(realCode.substring(tokens(0).length).trim)
+ info(s"driver is $any")
+ thread = Thread.currentThread()
+ driver = new HiveDriverProxy(any)
+ executeHQL(realCode, driver)
+ case _ => val resp = proc.run(realCode.substring(tokens(0).length).trim)
val result = new String(baos.toByteArray)
logger.info("RESULT => {}", result)
engineExecutorContext.appendStdout(result)
baos.reset()
- if(resp.getResponseCode != 0) {
+ if (resp.getResponseCode != 0) {
clearCurrentProgress()
HiveProgressHelper.clearHiveProgress()
onComplete()
@@ -246,17 +148,152 @@
})
}
- private def isNoResultSql(sql:String):Boolean = {
+ private def executeHQL(realCode: String, driver: HiveDriverProxy): ExecuteResponse = {
+ var needRetry: Boolean = true
+ var tryCount: Int = 0
+ var hasResult: Boolean = false
+ var rows: Int = 0
+ var columnCount: Int = 0
+ while (needRetry) {
+ needRetry = false
+ driver.setTryCount(tryCount + 1)
+ val startTime = System.currentTimeMillis()
+ try {
+ if (numberOfMRJobs > 0) engineExecutorContext.appendStdout(s"Your hive sql has $numberOfMRJobs MR jobs to do")
+ val hiveResponse: CommandProcessorResponse = driver.run(realCode)
+ if (hiveResponse.getResponseCode != 0) {
+ LOG.error("Hive query failed, response code is {}", hiveResponse.getResponseCode)
+ // todo check uncleared context ?
+ return ErrorExecuteResponse(hiveResponse.getErrorMessage, hiveResponse.getException)
+ }
+ engineExecutorContext.appendStdout(s"Time taken: ${Utils.msDurationToString(System.currentTimeMillis() - startTime)}, begin to fetch results.")
+ LOG.info(s"$getId >> Time taken: ${Utils.msDurationToString(System.currentTimeMillis() - startTime)}, begin to fetch results.")
+
+ val fieldSchemas = if (hiveResponse.getSchema != null) hiveResponse.getSchema.getFieldSchemas
+ else if (driver.getSchema != null) driver.getSchema.getFieldSchemas
+ else throw HiveQueryFailedException(41005, "cannot get the field schemas.")
+
+ LOG.debug("fieldSchemas are " + fieldSchemas)
+ if (fieldSchemas == null || isNoResultSql(realCode)) {
+ //IOUtils.closeQuietly(resultSetWriter)
+ numberOfMRJobs = -1
+ singleCodeCompleted.set(true)
+ onComplete()
+ singleSqlProgressMap.clear()
+ return SuccessExecuteResponse()
+ }
+ //get column data
+ val metaData: TableMetaData = getResultMetaData(fieldSchemas, engineExecutorContext.getEnableResultsetMetaWithTableName)
+ //send result
+ rows = sendResultSet(engineExecutorContext, driver, metaData)
+ columnCount = if (fieldSchemas != null) fieldSchemas.size() else 0
+ hasResult = true
+ } catch {
+ case e if HiveDriverProxy.isCommandNeedRetryException(e) => tryCount += 1
+ needRetry = true
+ HiveProgressHelper.clearHiveProgress()
+ onComplete()
+ singleSqlProgressMap.clear()
+ clearCurrentProgress()
+ HiveProgressHelper.storeSingleSQLProgress(0.0f)
+ LOG.warn("Retry hive query with a different approach...")
+ case t: Throwable => LOG.error(s"query failed, reason : ", t)
+ HiveProgressHelper.clearHiveProgress()
+ clearCurrentProgress()
+ HiveProgressHelper.storeSingleSQLProgress(0.0f)
+ singleCodeCompleted.set(true)
+ numberOfMRJobs = -1
+ onComplete()
+ singleSqlProgressMap.clear()
+ return ErrorExecuteResponse(t.getMessage, t)
+ }
+ }
+ if (hasResult) {
+ engineExecutorContext.appendStdout(s"Fetched $columnCount col(s) : $rows row(s) in hive")
+ LOG.info(s"$getId >> Fetched $columnCount col(s) : $rows row(s) in hive")
+ }
+ clearCurrentProgress()
+ HiveProgressHelper.clearHiveProgress()
+ HiveProgressHelper.storeSingleSQLProgress(0.0f)
+ singleCodeCompleted.set(true)
+ numberOfMRJobs = -1
+ onComplete()
+ singleSqlProgressMap.clear()
+ SuccessExecuteResponse()
+ }
+
+ private def sendResultSet(engineExecutorContext: EngineExecutionContext, driver: HiveDriverProxy, metaData: TableMetaData): Int = {
+ val resultSetWriter = engineExecutorContext.createResultSetWriter(ResultSetFactory.TABLE_TYPE)
+ resultSetWriter.addMetaData(metaData)
+ val colLength = metaData.columns.length
+ val result = new util.ArrayList[String]()
+ var rows = 0
+ while (driver.getResults(result)) {
+ val scalaResult: mutable.Buffer[String] = result
+ scalaResult foreach { s =>
+ val arr: Array[String] = s.split("\t")
+ val arrAny: ArrayBuffer[Any] = new ArrayBuffer[Any]()
+ if (arr.length > colLength) {
+ logger.error(s"""hive code 查询的结果中有\t制表符,hive不能进行切割,请使用spark执行""")
+ throw new ErrorException(60078, """您查询的结果中有\t制表符,hive不能进行切割,请使用spark执行""")
+ }
+ if (arr.length == colLength) arr foreach arrAny.add
+ else if (arr.length == 0) for (i <- 1 to colLength) arrAny add ""
+ else {
+ val i = colLength - arr.length
+ arr foreach arrAny.add
+ for (i <- 1 to i) arrAny add ""
+ }
+ resultSetWriter.addRecord(new TableRecord(arrAny.toArray))
+ }
+ rows += result.size
+ result.clear()
+ }
+ engineExecutorContext.sendResultSet(resultSetWriter)
+ IOUtils.closeQuietly(resultSetWriter)
+ rows
+ }
+
+ private def getResultMetaData(fieldSchemas: util.List[FieldSchema], useTableName: Boolean): TableMetaData = {
+ var results: util.List[FieldSchema] = null
+ val nameSet = new mutable.HashSet[String]()
+ val cleanSchema = new util.ArrayList[FieldSchema]()
+ fieldSchemas foreach {
+ fieldSchema =>
+ val name = fieldSchema.getName
+ if (name.split('.').length == 2) {
+ nameSet.add(name.split('.')(1))
+ cleanSchema += new FieldSchema(name.split('.')(1), fieldSchema.getType, fieldSchema.getComment)
+ }
+ }
+ if (nameSet.size < fieldSchemas.length) {
+ results = fieldSchemas
+ } else {
+ if (useTableName) {
+ results = fieldSchemas
+ } else {
+ results = cleanSchema
+ }
+ }
+
+ val columns = results.map(result => Column(result.getName,
+ DataType.toDataType(result.getType.toLowerCase()), result.getComment)).toArray[Column]
+ val metaData = new TableMetaData(columns)
+ metaData
+ }
+
+
+ private def isNoResultSql(sql: String): Boolean = {
if (sql.trim.startsWith("create table") || sql.trim.startsWith("drop table")) true else false
}
/**
- * 在job完成之前,要将singleSqlProgressMap的剩余的内容全部变为成功
- */
- private def onComplete():Unit = {
- if (engineExecutorContext != null){
- val arrayBuffer:ArrayBuffer[JobProgressInfo] = new ArrayBuffer[JobProgressInfo]()
+ * 在job完成之前,要将singleSqlProgressMap的剩余的内容全部变为成功
+ */
+ private def onComplete(): Unit = {
+ if (engineExecutorContext != null) {
+ val arrayBuffer: ArrayBuffer[JobProgressInfo] = new ArrayBuffer[JobProgressInfo]()
singleSqlProgressMap foreach {
case (jobId, progress) => arrayBuffer += JobProgressInfo(jobId, 200, 0, 0, 200)
}
@@ -265,14 +302,14 @@
}
- private def clearCurrentProgress():Unit = {
+ private def clearCurrentProgress(): Unit = {
reduce = 0
map = 0
singleLineProgress = 0.0f
}
- private def justFieldName(schemaName:String):String = {
+ private def justFieldName(schemaName: String): String = {
LOG.debug("schemaName: " + schemaName)
val arr = schemaName.split("\\.")
if (arr.length == 2) arr(1) else schemaName
@@ -292,53 +329,56 @@
override def progress(): Float = {
- if (engineExecutorContext != null){
+ if (engineExecutorContext != null) {
val totalSQLs = engineExecutorContext.getTotalParagraph
val currentSQL = engineExecutorContext.getCurrentParagraph
val currentBegin = (currentSQL - 1) / totalSQLs.asInstanceOf[Float]
HadoopJobExecHelper.runningJobs synchronized {
HadoopJobExecHelper.runningJobs foreach {
- runningJob => val name = runningJob.getID.toString
+ runningJob =>
+ val name = runningJob.getID.toString
val _progress = runningJob.reduceProgress() + runningJob.mapProgress()
singleSqlProgressMap.put(name, _progress / 2)
}
}
- var totalProgress:Float = 0.0F
+ var totalProgress: Float = 0.0F
singleSqlProgressMap foreach {
case (_name, _progress) => totalProgress += _progress
}
- try{
+ try {
totalProgress = totalProgress / (numberOfMRJobs * totalSQLs)
- }catch{
- case e:Exception => totalProgress = 0.0f
+ } catch {
+ case e: Exception => totalProgress = 0.0f
case _ => totalProgress = 0.0f
}
logger.debug(s"hive progress is $totalProgress")
if (totalProgress.isNaN || totalProgress.isInfinite) return 0.0f
totalProgress + currentBegin
- }else 0.0f
+ } else 0.0f
}
override def getProgressInfo: Array[JobProgressInfo] = {
- val arrayBuffer:ArrayBuffer[JobProgressInfo] = new ArrayBuffer[JobProgressInfo]()
- singleSqlProgressMap synchronized{
+ val arrayBuffer: ArrayBuffer[JobProgressInfo] = new ArrayBuffer[JobProgressInfo]()
+ singleSqlProgressMap synchronized {
val set = singleSqlProgressMap.keySet()
val tempSet = new util.HashSet[RunningJob](HadoopJobExecHelper.runningJobs)
import scala.collection.JavaConverters._
set.asScala foreach {
- key => if (!tempSet.contains(key)){
- arrayBuffer += JobProgressInfo(key, 200, 0, 0, 200)
- }
+ key =>
+ if (!tempSet.contains(key)) {
+ arrayBuffer += JobProgressInfo(key, 200, 0, 0, 200)
+ }
}
}
HadoopJobExecHelper.runningJobs synchronized {
HadoopJobExecHelper.runningJobs foreach {
- runningJob => val succeedTask = ((runningJob.mapProgress() + runningJob.reduceProgress()) * 100).asInstanceOf[Int]
- if (succeedTask.equals(totalTask.asInstanceOf[Int]) || runningJob.isComplete || runningJob.isSuccessful){
+ runningJob =>
+ val succeedTask = ((runningJob.mapProgress() + runningJob.reduceProgress()) * 100).asInstanceOf[Int]
+ if (succeedTask.equals(totalTask.asInstanceOf[Int]) || runningJob.isComplete || runningJob.isSuccessful) {
arrayBuffer += JobProgressInfo(runningJob.getID.toString, totalTask.asInstanceOf[Int], 0, 0, totalTask.asInstanceOf[Int])
- }else{
+ } else {
arrayBuffer += JobProgressInfo(runningJob.getID.toString, totalTask.asInstanceOf[Int], 1, 0, succeedTask)
}
}
@@ -350,6 +390,11 @@
override def killTask(taskID: String): Unit = {
LOG.info(s"hive begins to kill job with id : ${taskID}")
HadoopJobExecHelper.killRunningJobs()
+ //Utils.tryQuietly(TezJobExecHelper.killRunningJobs())
+ Utils.tryQuietly(HiveInterruptUtils.interrupt())
+ if (null != thread) {
+ Utils.tryAndWarn(thread.interrupt())
+ }
clearCurrentProgress()
singleSqlProgressMap.clear()
HiveProgressHelper.clearHiveProgress()
@@ -400,28 +445,20 @@
class HiveDriverProxy(driver: Any) extends Logging {
def getSchema(): Schema = {
- Utils.tryAndWarn {
- driver.getClass.getMethod("getSchema").invoke(driver).asInstanceOf[Schema]
- }
+ driver.getClass.getMethod("getSchema").invoke(driver).asInstanceOf[Schema]
}
def run(): CommandProcessorResponse = {
- Utils.tryAndWarn {
- driver.getClass.getMethod("run").invoke(driver).asInstanceOf[CommandProcessorResponse]
- }
+ driver.getClass.getMethod("run").invoke(driver).asInstanceOf[CommandProcessorResponse]
}
def run(command: String): CommandProcessorResponse = {
- Utils.tryAndWarn {
- driver.getClass.getMethod("run", classOf[String]).invoke(driver, command.asInstanceOf[AnyRef]).asInstanceOf[CommandProcessorResponse]
- }
+ driver.getClass.getMethod("run", classOf[String]).invoke(driver, command.asInstanceOf[AnyRef]).asInstanceOf[CommandProcessorResponse]
}
def setTryCount(retry: Int): Unit = {
if (HiveDriverProxy.HAS_COMMAND_NEED_RETRY_EXCEPTION) {
- Utils.tryAndWarn {
- driver.getClass.getMethod("setTryCount", classOf[Int]).invoke(driver, retry.asInstanceOf[AnyRef])
- }
+ driver.getClass.getMethod("setTryCount", classOf[Int]).invoke(driver, retry.asInstanceOf[AnyRef])
}
}
@@ -430,13 +467,20 @@
driver.getClass.getMethod("getResults", classOf[util.List[_]]).invoke(driver, res.asInstanceOf[AnyRef]).asInstanceOf[Boolean]
}
}
+
+ def close(): Unit = {
+ info("start to close driver")
+ driver.getClass.getMethod("close").invoke(driver)
+ driver.getClass.getMethod("destroy").invoke(driver)
+ info("Finished to close driver")
+ }
+
}
object HiveDriverProxy extends Logging {
-
private val COMMAND_NEED_RETRY_EXCEPTION_CLASS_STR = "org.apache.hadoop.hive.ql.CommandNeedRetryException"
private val IDRIVER_CLASS_STR = "org.apache.hadoop.hive.ql.IDriver"
diff --git a/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/hive/hook/HiveAddJarsEngineHook.scala b/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/hive/hook/HiveAddJarsEngineHook.scala
index fa3a30d..a2a0a90 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/hive/hook/HiveAddJarsEngineHook.scala
+++ b/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/hive/hook/HiveAddJarsEngineHook.scala
@@ -20,8 +20,8 @@
import com.webank.wedatasphere.linkis.engineconn.common.creation.EngineCreationContext
import com.webank.wedatasphere.linkis.engineconn.common.engineconn.EngineConn
import com.webank.wedatasphere.linkis.engineconn.common.hook.EngineConnHook
-import com.webank.wedatasphere.linkis.engineconn.computation.executor.creation.ComputationExecutorManager
import com.webank.wedatasphere.linkis.engineconn.computation.executor.execute.EngineExecutionContext
+import com.webank.wedatasphere.linkis.engineconn.core.executor.ExecutorManager
import com.webank.wedatasphere.linkis.engineplugin.hive.executor.HiveEngineConnExecutor
import com.webank.wedatasphere.linkis.manager.label.entity.Label
import com.webank.wedatasphere.linkis.manager.label.entity.engine.{CodeLanguageLabel, RunType}
@@ -57,7 +57,7 @@
try {
val sql = addSql + jar
logger.info("begin to run hive sql {}", sql)
- ComputationExecutorManager.getInstance.getExecutorByLabels(labels) match {
+ ExecutorManager.getInstance.getExecutorByLabels(labels) match {
case executor: HiveEngineConnExecutor =>
executor.executeLine(new EngineExecutionContext(executor), sql)
case _ =>
diff --git a/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/hive/hook/HiveAddMetaTableNameHook.scala b/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/hive/hook/HiveAddMetaTableNameHook.scala
new file mode 100644
index 0000000..e0fa484
--- /dev/null
+++ b/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/hive/hook/HiveAddMetaTableNameHook.scala
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.linkis.engineplugin.hive.hook
+
+import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
+import com.webank.wedatasphere.linkis.engineconn.common.creation.EngineCreationContext
+import com.webank.wedatasphere.linkis.engineconn.computation.executor.execute.EngineExecutionContext
+import com.webank.wedatasphere.linkis.engineconn.computation.executor.hook.ComputationExecutorHook
+import com.webank.wedatasphere.linkis.engineplugin.hive.exception.HiveQueryFailedException
+import org.apache.commons.lang.StringUtils
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars
+
+import java.util
+import java.util.regex.Pattern
+import scala.collection.JavaConverters.mapAsScalaMapConverter
+
+class HiveAddMetaTableNameHook extends ComputationExecutorHook with Logging {
+
+ private val HIVE_USE_TABLENAME_REGEX = ConfVars.HIVE_RESULTSET_USE_UNIQUE_COLUMN_NAMES.varname + "\\s*=\\s*(true|false)"
+
+ override def getHookName(): String = "Add tableName config in metadata of hive result."
+
+ override def beforeExecutorExecute(engineExecutionContext: EngineExecutionContext, engineCreationContext: EngineCreationContext, codeBeforeHook: String): String = {
+ val configMap = new util.HashMap[String, String]()
+ engineCreationContext.getOptions.asScala.filterNot(_._2.isInstanceOf[util.Map[String, Any]]).foreach(kv => configMap.put(kv._1, s"${kv._2}"))
+ engineExecutionContext.getProperties.asScala.filterNot(_._2.isInstanceOf[util.Map[String, Any]]).foreach(kv => configMap.put(kv._1, s"${kv._2}"))
+ if (configMap.containsKey(ConfVars.HIVE_RESULTSET_USE_UNIQUE_COLUMN_NAMES.varname)) {
+ engineExecutionContext.setEnableResultsetMetaWithTableName(configMap.get(ConfVars.HIVE_RESULTSET_USE_UNIQUE_COLUMN_NAMES.varname).toBoolean)
+ }
+
+ if (codeBeforeHook.contains(ConfVars.HIVE_RESULTSET_USE_UNIQUE_COLUMN_NAMES.varname)) {
+ val pattern = Pattern.compile(HIVE_USE_TABLENAME_REGEX)
+ codeBeforeHook.split("\n").foreach(line => {
+ if (StringUtils.isNotBlank(line)) {
+ val mather = pattern.matcher(line)
+ if (mather.find()) {
+ val value = mather.group(1)
+ Utils.tryCatch {
+ val boolValue = value.toBoolean
+ if (engineExecutionContext.getProperties.containsKey(ConfVars.HIVE_RESULTSET_USE_UNIQUE_COLUMN_NAMES.varname)) {
+ warn(s"Should not add param ${mather.group()} in both code and startupMap, will use the param in code.")
+ }
+ engineExecutionContext.setEnableResultsetMetaWithTableName(boolValue)
+ } {
+ case e: IllegalArgumentException =>
+ throw HiveQueryFailedException(41006, s"Invalid value : ${value} in param [${mather.group()}]")
+ }
+ }
+ }
+ })
+ }
+ codeBeforeHook
+ }
+}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/hive/hook/UseDatabaseEngineHook.scala b/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/hive/hook/UseDatabaseEngineHook.scala
deleted file mode 100644
index 07a0401..0000000
--- a/linkis-engineconn-plugins/engineconn-plugins/hive/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/hive/hook/UseDatabaseEngineHook.scala
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * Copyright 2019 WeBank
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.webank.wedatasphere.linkis.engineplugin.hive.hook
-
-import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
-import com.webank.wedatasphere.linkis.engineconn.common.creation.EngineCreationContext
-import com.webank.wedatasphere.linkis.engineconn.common.engineconn.EngineConn
-import com.webank.wedatasphere.linkis.engineconn.common.hook.EngineConnHook
-import com.webank.wedatasphere.linkis.engineconn.computation.executor.creation.ComputationExecutorManager
-import com.webank.wedatasphere.linkis.engineconn.computation.executor.execute.EngineExecutionContext
-import com.webank.wedatasphere.linkis.engineplugin.hive.executor.HiveEngineConnExecutor
-import com.webank.wedatasphere.linkis.manager.label.entity.Label
-import com.webank.wedatasphere.linkis.manager.label.entity.engine.{CodeLanguageLabel, RunType}
-import org.apache.commons.lang.StringUtils
-
-class UseDatabaseEngineHook extends EngineConnHook with Logging {
-
- private val USER: String = "user"
-
- override def beforeCreateEngineConn(engineCreationContext: EngineCreationContext): Unit = {}
-
- override def beforeExecutionExecute(engineCreationContext: EngineCreationContext, engineConn: EngineConn): Unit = {}
-
- override def afterExecutionExecute(engineCreationContext: EngineCreationContext, engineConn: EngineConn): Unit = Utils.tryAndError {
- val options = engineCreationContext.getOptions
- val user: String = if (options.containsKey(USER)) options.get(USER) else {
- Utils.getJvmUser
- }
- val database = if (StringUtils.isNotEmpty(user)) {
- user + "_ind"
- } else {
- "default"
- }
- val useDataBaseSql = "use " + database
- info(s"hive client begin to run init_code $useDataBaseSql")
- val codeLanguageLabel = new CodeLanguageLabel
- codeLanguageLabel.setCodeType(RunType.HIVE.toString)
- val labels = Array[Label[_]](codeLanguageLabel)
- ComputationExecutorManager.getInstance.getExecutorByLabels(labels) match {
- case executor: HiveEngineConnExecutor =>
- executor.executeLine(new EngineExecutionContext(executor), useDataBaseSql)
- case _ =>
- }
-
- }
-}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/io_file/pom.xml b/linkis-engineconn-plugins/engineconn-plugins/io_file/pom.xml
index dc7c950..ef201ad 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/io_file/pom.xml
+++ b/linkis-engineconn-plugins/engineconn-plugins/io_file/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-engineconn-plugins/engineconn-plugins/io_file/src/main/resources/linkis-engineconn.properties b/linkis-engineconn-plugins/engineconn-plugins/io_file/src/main/resources/linkis-engineconn.properties
index b67de26..015a5c3 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/io_file/src/main/resources/linkis-engineconn.properties
+++ b/linkis-engineconn-plugins/engineconn-plugins/io_file/src/main/resources/linkis-engineconn.properties
@@ -29,4 +29,5 @@
wds.linkis.engineconn.support.parallelism=true
-wds.linkis.engineconn.max.free.time=0
\ No newline at end of file
+wds.linkis.engineconn.max.free.time=0
+wds.linkis.engine.push.log.enable=false
\ No newline at end of file
diff --git a/linkis-engineconn-plugins/engineconn-plugins/io_file/src/main/resources/log4j2-engineconn.xml b/linkis-engineconn-plugins/engineconn-plugins/io_file/src/main/resources/log4j2-engineconn.xml
index 66388d7..26d136d 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/io_file/src/main/resources/log4j2-engineconn.xml
+++ b/linkis-engineconn-plugins/engineconn-plugins/io_file/src/main/resources/log4j2-engineconn.xml
@@ -22,13 +22,17 @@
<ThresholdFilter level="INFO" onMatch="ACCEPT" onMismatch="DENY"/>
<PatternLayout pattern="%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%t] %logger{36} %L %M - %msg%xEx%n"/>
</Console>
+ <File name="stderr" fileName="${env:PWD}/logs/stderr" append="true">
+ <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %class{36} %L %M - %msg%xEx%n"/>
+ </File>
</appenders>
<loggers>
<root level="INFO">
<appender-ref ref="Console"/>
</root>
-
-
+ <logger name="org.springframework.boot.diagnostics.LoggingFailureAnalysisReporter" level="error" additivity="true">
+ <appender-ref ref="stderr"/>
+ </logger>
</loggers>
</configuration>
diff --git a/linkis-engineconn-plugins/engineconn-plugins/io_file/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/io/executor/IoEngineConnExecutor.scala b/linkis-engineconn-plugins/engineconn-plugins/io_file/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/io/executor/IoEngineConnExecutor.scala
index 0e00406..9338823 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/io_file/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/io/executor/IoEngineConnExecutor.scala
+++ b/linkis-engineconn-plugins/engineconn-plugins/io_file/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/io/executor/IoEngineConnExecutor.scala
@@ -34,7 +34,7 @@
import com.webank.wedatasphere.linkis.scheduler.executer.{AliasOutputExecuteResponse, ExecuteResponse, SuccessExecuteResponse}
import com.webank.wedatasphere.linkis.storage.FSFactory
import com.webank.wedatasphere.linkis.storage.domain.{MethodEntity, MethodEntitySerializer}
-import com.webank.wedatasphere.linkis.storage.exception.StorageErrorException
+import com.webank.wedatasphere.linkis.storage.exception.{StorageErrorCode, StorageErrorException}
import com.webank.wedatasphere.linkis.storage.fs.FileSystem
import com.webank.wedatasphere.linkis.storage.utils.StorageUtils
import org.apache.commons.io.IOUtils
@@ -202,11 +202,11 @@
val fsType = methodEntity.fsType
val proxyUser = methodEntity.proxyUser
if(!userFSInfos.containsKey(proxyUser)) {
- throw new StorageErrorException(53001,s"not exist storage $fsType, please init first.")
+ throw new StorageErrorException(StorageErrorCode.FS_NOT_INIT.getCode, s"not exist storage $fsType, please init first.")
}
userFSInfos.get(proxyUser) synchronized {
val userFsInfo = userFSInfos.get(proxyUser).find(fsInfo => fsInfo != null && fsInfo.id == methodEntity.id)
- .getOrElse(throw new StorageErrorException(53001,s"not exist storage $fsType, please init first."))
+ .getOrElse(throw new StorageErrorException(StorageErrorCode.FS_NOT_INIT.getCode, s"not exist storage $fsType, please init first."))
userFsInfo.lastAccessTime = System.currentTimeMillis()
userFsInfo.fs
}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/jdbc/pom.xml b/linkis-engineconn-plugins/engineconn-plugins/jdbc/pom.xml
index 3bcbf80..c17399f 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/jdbc/pom.xml
+++ b/linkis-engineconn-plugins/engineconn-plugins/jdbc/pom.xml
@@ -22,7 +22,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-engineconn-plugins/engineconn-plugins/jdbc/src/main/resources/log4j2-engineconn.xml b/linkis-engineconn-plugins/engineconn-plugins/jdbc/src/main/resources/log4j2-engineconn.xml
index 4d82457..2bca31c 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/jdbc/src/main/resources/log4j2-engineconn.xml
+++ b/linkis-engineconn-plugins/engineconn-plugins/jdbc/src/main/resources/log4j2-engineconn.xml
@@ -18,12 +18,19 @@
<configuration status="error" monitorInterval="30">
<appenders>
<Console name="Console" target="SYSTEM_OUT">
- <ThresholdFilter level="trace" onMatch="ACCEPT" onMismatch="DENY"/>
+ <ThresholdFilter level="INFO" onMatch="ACCEPT" onMismatch="DENY"/>
<PatternLayout pattern="%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%t] %logger{36} %L %M - %msg%xEx%n"/>
</Console>
<Send name="Send" >
+ <Filters>
+ <ThresholdFilter level="WARN" onMatch="ACCEPT" onMismatch="DENY" />
+ </Filters>
<PatternLayout pattern="%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%t] %logger{36} %L %M - %msg%xEx%n"/>
</Send>
+
+ <File name="stderr" fileName="${env:PWD}/logs/stderr" append="true">
+ <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %class{36} %L %M - %msg%xEx%n"/>
+ </File>
</appenders>
<loggers>
<root level="INFO">
@@ -31,6 +38,9 @@
<appender-ref ref="Console"/>
<appender-ref ref="Send"/>
</root>
+ <logger name="org.springframework.boot.diagnostics.LoggingFailureAnalysisReporter" level="error" additivity="true">
+ <appender-ref ref="stderr"/>
+ </logger>
<logger name="com.netflix.discovery" level="warn" additivity="true">
<appender-ref ref="Send"/>
</logger>
@@ -62,6 +72,9 @@
<appender-ref ref="Send"/>
</logger>
+ <logger name="org.apache.hadoop.ipc.Client" level="ERROR" additivity="true">
+ <appender-ref ref="Send"/>
+ </logger>
</loggers>
</configuration>
diff --git a/linkis-engineconn-plugins/engineconn-plugins/pipeline/pom.xml b/linkis-engineconn-plugins/engineconn-plugins/pipeline/pom.xml
index 25500f0..d45e0ad 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/pipeline/pom.xml
+++ b/linkis-engineconn-plugins/engineconn-plugins/pipeline/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-engineconn-plugins/engineconn-plugins/pipeline/src/main/resources/log4j2-engineconn.xml b/linkis-engineconn-plugins/engineconn-plugins/pipeline/src/main/resources/log4j2-engineconn.xml
index 0f5dd45..b1a2b13 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/pipeline/src/main/resources/log4j2-engineconn.xml
+++ b/linkis-engineconn-plugins/engineconn-plugins/pipeline/src/main/resources/log4j2-engineconn.xml
@@ -27,15 +27,25 @@
<DefaultRolloverStrategy max="20"/>
</RollingFile>
<Send name="Send" >
+ <Filters>
+ <ThresholdFilter level="WARN" onMatch="ACCEPT" onMismatch="DENY" />
+ </Filters>
<PatternLayout pattern="%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%t] %logger{36} %L %M - %msg%xEx%n"/>
</Send>
+
+ <File name="stderr" fileName="${env:PWD}/logs/stderr" append="true">
+ <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %class{36} %L %M - %msg%xEx%n"/>
+ </File>
</appenders>
<loggers>
- <root level="INFO">
+ <root level="INFO">
<!--<appender-ref ref="RollingFile"/>-->
<appender-ref ref="Console"/>
<appender-ref ref="Send"/>
</root>
+ <logger name="org.springframework.boot.diagnostics.LoggingFailureAnalysisReporter" level="error" additivity="true">
+ <appender-ref ref="stderr"/>
+ </logger>
<logger name="com.netflix.discovery" level="warn" additivity="true">
<appender-ref ref="Send"/>
</logger>
@@ -48,12 +58,16 @@
<logger name="com.webank.wedatasphere.linkis.server.security" level="warn" additivity="true">
<appender-ref ref="Send"/>
</logger>
- <logger name="org.apache.hadoop.hive.ql.exec.mr.ExecDriver" level="fatal" additivity="true">
+ <logger name="org.apache.hadoop.hive.ql.exec.mr.ExecDriver" level="warn" additivity="true">
<appender-ref ref="Send"/>
</logger>
<logger name="org.apache.hadoop.hdfs.KeyProviderCache" level="fatal" additivity="true">
<appender-ref ref="Send"/>
</logger>
+
+ <logger name="org.apache.hadoop.ipc.Client" level="ERROR" additivity="true">
+ <appender-ref ref="Send"/>
+ </logger>
</loggers>
</configuration>
diff --git a/linkis-engineconn-plugins/engineconn-plugins/python/pom.xml b/linkis-engineconn-plugins/engineconn-plugins/python/pom.xml
index b94d4f5..26ad478 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/python/pom.xml
+++ b/linkis-engineconn-plugins/engineconn-plugins/python/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-engineconn-plugins/engineconn-plugins/python/src/main/resources/linkis-engineconn.properties b/linkis-engineconn-plugins/engineconn-plugins/python/src/main/resources/linkis-engineconn.properties
index 0c121f5..17f7299 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/python/src/main/resources/linkis-engineconn.properties
+++ b/linkis-engineconn-plugins/engineconn-plugins/python/src/main/resources/linkis-engineconn.properties
@@ -24,3 +24,5 @@
#wds.linkis.keytab.enable=true
wds.linkis.engineconn.plugin.default.class=com.webank.wedatasphere.linkis.manager.engineplugin.python.PythonEngineConnPlugin
+
+wds.linkis.engine.connector.hooks=com.webank.wedatasphere.linkis.engineconn.computation.executor.hook.ComputationEngineConnHook,com.webank.wedatasphere.linkis.engineconn.computation.executor.hook.PyFunctionEngineHook
\ No newline at end of file
diff --git a/linkis-engineconn-plugins/engineconn-plugins/python/src/main/resources/log4j2-engineconn.xml b/linkis-engineconn-plugins/engineconn-plugins/python/src/main/resources/log4j2-engineconn.xml
index ff89c4e..cdac76c 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/python/src/main/resources/log4j2-engineconn.xml
+++ b/linkis-engineconn-plugins/engineconn-plugins/python/src/main/resources/log4j2-engineconn.xml
@@ -23,45 +23,25 @@
<PatternLayout pattern="%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%t] %logger{36} %L %M - %msg%xEx%n"/>
</Console>
- <Send name="Send" >
+ <Send name="Send">
+ <Filters>
+ <ThresholdFilter level="WARN" onMatch="ACCEPT" onMismatch="DENY"/>
+ </Filters>
<PatternLayout pattern="%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%t] %logger{36} %L %M - %msg%xEx%n"/>
</Send>
+
+ <File name="stderr" fileName="${env:PWD}/logs/stderr" append="true">
+ <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %class{36} %L %M - %msg%xEx%n"/>
+ </File>
</appenders>
<loggers>
<root level="INFO">
- <!--<appender-ref ref="RollingFile"/>-->
<appender-ref ref="Console"/>
<appender-ref ref="Send"/>
</root>
- <logger name="com.netflix.discovery" level="warn" additivity="true">
- <appender-ref ref="Send"/>
- </logger>
- <logger name="org.apache.hadoop.yarn" level="warn" additivity="true">
- <appender-ref ref="Send"/>
- </logger>
- <logger name="org.springframework" level="warn" additivity="true">
- <appender-ref ref="Send"/>
- </logger>
- <logger name="com.webank.wedatasphere.linkis" level="warn" additivity="true">
- <appender-ref ref="Send"/>
- </logger>
- <logger name="org.apache.hadoop.hive.ql.exec.mr.ExecDriver" level="fatal" additivity="true">
- <appender-ref ref="Send"/>
- </logger>
- <logger name="org.apache.hadoop.hdfs.KeyProviderCache" level="fatal" additivity="true">
- <appender-ref ref="Send"/>
- </logger>
- <logger name="org.spark_project.jetty" level="ERROR" additivity="true">
- <appender-ref ref="Send"/>
- </logger>
- <logger name="org.eclipse.jetty" level="ERROR" additivity="true">
- <appender-ref ref="Send"/>
- </logger>
- <logger name="org.springframework" level="ERROR" additivity="true">
- <appender-ref ref="Send"/>
- </logger>
- <logger name="org.reflections.Reflections" level="ERROR" additivity="true">
- <appender-ref ref="Send"/>
+ <logger name="org.springframework.boot.diagnostics.LoggingFailureAnalysisReporter" level="error"
+ additivity="true">
+ <appender-ref ref="stderr"/>
</logger>
</loggers>
</configuration>
diff --git a/linkis-engineconn-plugins/engineconn-plugins/python/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/python/executor/PythonEngineConnExecutor.scala b/linkis-engineconn-plugins/engineconn-plugins/python/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/python/executor/PythonEngineConnExecutor.scala
index 18d0b24..10bebbc 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/python/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/python/executor/PythonEngineConnExecutor.scala
+++ b/linkis-engineconn-plugins/engineconn-plugins/python/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/python/executor/PythonEngineConnExecutor.scala
@@ -52,7 +52,6 @@
info(s" EngineExecutionContext user python.version = > ${pythonVersion}")
System.getProperties.put("python.version", pythonVersion)
info(s" System getProperties python.version = > ${System.getProperties.getProperty("python.version")}")
- System.getProperties.put("python.application.pyFiles", engineExecutionContext.getProperties.getOrDefault("python.application.pyFiles", "file:///mnt/bdap/test/test/test.zip").toString)
if(engineExecutionContext != this.engineExecutionContext){
this.engineExecutionContext = engineExecutionContext
pythonSession.setEngineExecutionContext(engineExecutionContext)
diff --git a/linkis-engineconn-plugins/engineconn-plugins/python/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/python/executor/PythonSession.scala b/linkis-engineconn-plugins/engineconn-plugins/python/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/python/executor/PythonSession.scala
index 7c9eec5..29bb6b0 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/python/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/python/executor/PythonSession.scala
+++ b/linkis-engineconn-plugins/engineconn-plugins/python/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/python/executor/PythonSession.scala
@@ -84,7 +84,7 @@
gatewayServer.start()
info("Python executor file path is: " + getClass.getClassLoader.getResource("python/python.py").toURI)
val pythonClasspath = new StringBuilder(PythonSession.pythonPath)
- val pyFiles = PythonEngineConfiguration.PYTHON_PATH.getValue
+ val pyFiles = PythonEngineConfiguration.PYTHON_PATH.getValue(EngineConnServer.getEngineCreationContext.getOptions)
info(s"pyFiles => ${pyFiles}")
if (StringUtils.isNotEmpty(pyFiles)) {
pythonClasspath ++= File.pathSeparator ++= pyFiles.split(",").mkString(File.pathSeparator)
diff --git a/linkis-engineconn-plugins/engineconn-plugins/shell/pom.xml b/linkis-engineconn-plugins/engineconn-plugins/shell/pom.xml
index 3ef07f6..1cf79c1 100755
--- a/linkis-engineconn-plugins/engineconn-plugins/shell/pom.xml
+++ b/linkis-engineconn-plugins/engineconn-plugins/shell/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-engineconn-plugins/engineconn-plugins/shell/src/main/resources/log4j2-engineconn.xml b/linkis-engineconn-plugins/engineconn-plugins/shell/src/main/resources/log4j2-engineconn.xml
index ffacbbe..8fbd9fe 100755
--- a/linkis-engineconn-plugins/engineconn-plugins/shell/src/main/resources/log4j2-engineconn.xml
+++ b/linkis-engineconn-plugins/engineconn-plugins/shell/src/main/resources/log4j2-engineconn.xml
@@ -22,15 +22,25 @@
</Console>
<Send name="Send" >
+ <Filters>
+ <ThresholdFilter level="WARN" onMatch="ACCEPT" onMismatch="DENY" />
+ </Filters>
<PatternLayout pattern="%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%t] %logger{36} %L %M - %msg%xEx%n"/>
</Send>
+
+ <File name="stderr" fileName="${env:PWD}/logs/stderr" append="true">
+ <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %class{36} %L %M - %msg%xEx%n"/>
+ </File>
</appenders>
<loggers>
- <root level="INFO">
+ <root level="INFO">
<!--<appender-ref ref="RollingFile"/>-->
<appender-ref ref="Console"/>
<appender-ref ref="Send"/>
</root>
+ <logger name="org.springframework.boot.diagnostics.LoggingFailureAnalysisReporter" level="error" additivity="true">
+ <appender-ref ref="stderr"/>
+ </logger>
<logger name="com.netflix.discovery" level="warn" additivity="true">
<appender-ref ref="Send"/>
</logger>
@@ -43,7 +53,7 @@
<logger name="com.webank.wedatasphere.linkis.server.security" level="warn" additivity="true">
<appender-ref ref="Send"/>
</logger>
- <logger name="org.apache.hadoop.hive.ql.exec.mr.ExecDriver" level="fatal" additivity="true">
+ <logger name="org.apache.hadoop.hive.ql.exec.mr.ExecDriver" level="warn" additivity="true">
<appender-ref ref="Send"/>
</logger>
<logger name="org.apache.hadoop.hdfs.KeyProviderCache" level="fatal" additivity="true">
@@ -62,6 +72,9 @@
<appender-ref ref="Send"/>
</logger>
+ <logger name="org.apache.hadoop.ipc.Client" level="ERROR" additivity="true">
+ <appender-ref ref="Send"/>
+ </logger>
</loggers>
</configuration>
diff --git a/linkis-engineconn-plugins/engineconn-plugins/spark/pom.xml b/linkis-engineconn-plugins/engineconn-plugins/spark/pom.xml
index 53f9ca7..0e4c687 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/spark/pom.xml
+++ b/linkis-engineconn-plugins/engineconn-plugins/spark/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
@@ -37,18 +37,52 @@
<groupId>com.webank.wedatasphere.linkis</groupId>
<artifactId>linkis-engineconn-plugin-core</artifactId>
<version>${linkis.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-core</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-client</artifactId>
+ </exclusion>
+ </exclusions>
</dependency>
<dependency>
<groupId>com.webank.wedatasphere</groupId>
<artifactId>spark-excel_2.11</artifactId>
<version>${linkis.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-core</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-client</artifactId>
+ </exclusion>
+ </exclusions>
</dependency>
<dependency>
<groupId>com.webank.wedatasphere.linkis</groupId>
<artifactId>linkis-computation-engineconn</artifactId>
<version>${linkis.version}</version>
+ <exclusions>
+ <exclusion>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-core</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-client</artifactId>
+ </exclusion>
+ </exclusions>
</dependency>
@@ -102,6 +136,15 @@
<artifactId>json4s-jackson_${scala.binary.version}</artifactId>
</exclusion>
+ <exclusion>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-core</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-client</artifactId>
+ </exclusion>
+
</exclusions>
</dependency>
@@ -165,6 +208,14 @@
<artifactId>commons-logging</artifactId>
<groupId>commons-logging</groupId>
</exclusion>
+ <exclusion>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-core</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-client</artifactId>
+ </exclusion>
</exclusions>
</dependency>
@@ -198,6 +249,14 @@
<artifactId>jackson-module-scala_${scala.binary.version}</artifactId>
<groupId>com.fasterxml.jackson.module</groupId>
</exclusion>
+ <exclusion>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-core</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-client</artifactId>
+ </exclusion>
</exclusions>
</dependency>
@@ -228,6 +287,14 @@
<artifactId>jackson-mapper-asl</artifactId>
<groupId>org.codehaus.jackson</groupId>
</exclusion>
+ <exclusion>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-core</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-client</artifactId>
+ </exclusion>
</exclusions>
</dependency>
@@ -257,6 +324,14 @@
<artifactId>jackson-mapper-asl</artifactId>
<groupId>org.codehaus.jackson</groupId>
</exclusion>
+ <exclusion>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-core</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>com.sun.jersey</groupId>
+ <artifactId>jersey-client</artifactId>
+ </exclusion>
</exclusions>
</dependency>
diff --git a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/resources/log4j2-engineconn.xml b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/resources/log4j2-engineconn.xml
index 0fe2828..43973ea 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/resources/log4j2-engineconn.xml
+++ b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/resources/log4j2-engineconn.xml
@@ -27,6 +27,10 @@
</Filters>
<PatternLayout pattern="%d{yyyy-MM-dd HH:mm:ss.SSS} %-5level [%t] %logger{36} %L %M - %msg%xEx%n"/>
</Send>
+
+ <File name="stderr" fileName="${env:PWD}/logs/stderr" append="true">
+ <PatternLayout pattern="%d{HH:mm:ss.SSS} %-5level %class{36} %L %M - %msg%xEx%n"/>
+ </File>
</appenders>
<loggers>
<root level="INFO">
@@ -34,6 +38,9 @@
<appender-ref ref="Console"/>
<appender-ref ref="Send"/>
</root>
+ <logger name="org.springframework.boot.diagnostics.LoggingFailureAnalysisReporter" level="error" additivity="true">
+ <appender-ref ref="stderr"/>
+ </logger>
<logger name="com.netflix.discovery" level="warn" additivity="true">
<appender-ref ref="Send"/>
</logger>
@@ -64,7 +71,9 @@
<logger name="org.reflections.Reflections" level="ERROR" additivity="true">
<appender-ref ref="Send"/>
</logger>
-
+ <logger name="org.apache.hadoop.ipc.Client" level="ERROR" additivity="true">
+ <appender-ref ref="Send"/>
+ </logger>
</loggers>
</configuration>
diff --git a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/resources/python/mix_pyspark.py b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/resources/python/mix_pyspark.py
index a8af121..8244dc3 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/resources/python/mix_pyspark.py
+++ b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/resources/python/mix_pyspark.py
@@ -1,6 +1,5 @@
import sys, getopt, traceback, json, re
import os
-from py4j.protocol import Py4JJavaError, Py4JNetworkError
os.environ['PYSPARK_ALLOW_INSECURE_GATEWAY']='1'
import matplotlib
import os
@@ -11,6 +10,7 @@
for i in range(len(paths)):
sys.path.insert(0, paths[i])
+from py4j.protocol import Py4JJavaError, Py4JNetworkError
from py4j.java_gateway import java_import, JavaGateway, GatewayClient
from pyspark.conf import SparkConf
from pyspark.context import SparkContext
diff --git a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/condition/EngineHooksCondition.scala b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/condition/EngineHooksCondition.scala
deleted file mode 100644
index 74319d3..0000000
--- a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/condition/EngineHooksCondition.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Copyright 2019 WeBank
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.webank.wedatasphere.linkis.engineplugin.spark.condition
-
-import com.webank.wedatasphere.linkis.common.conf.CommonVars
-import org.springframework.context.annotation.{Condition, ConditionContext}
-import org.springframework.core.`type`.AnnotatedTypeMetadata
-
-/**
- *
- */
-class EngineHooksCondition extends Condition {
-
- val condition = CommonVars("wds.linkis.spark.engine.hooks.enable",true).getValue
- override def matches(conditionContext: ConditionContext, annotatedTypeMetadata: AnnotatedTypeMetadata): Boolean = {
- condition
- }
-}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/config/SparkConfiguration.scala b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/config/SparkConfiguration.scala
index 3d059d6..bc9ad64 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/config/SparkConfiguration.scala
+++ b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/config/SparkConfiguration.scala
@@ -40,6 +40,9 @@
val SPARK_DEPLOY_MODE = CommonVars[String]("spark.submit.deployMode", "client")
+ val SPARK_APP_NAME = CommonVars[String]("spark.app.name", "Linkis-EngineConn-Spark")
+
+ val SPARK_PYTHON_VERSION = CommonVars[String]("spark.python.version", "python")
val SPARK_EXTRA_JARS = CommonVars[String]("spark.jars", "", "Additional jar package, Driver and Executor take effect(额外的jar包,Driver和Executor生效)")
@@ -57,9 +60,9 @@
val ENGINE_JAR = CommonVars[String]("wds.linkis.enginemanager.core.jar", getMainJarName)
- val DEFAULT_SPARK_JAR_NAME = CommonVars[String]("wds.linkis.ecp.spark.default.jar", "linkis-engineconn-core-1.0.0.jar")
+ val DEFAULT_SPARK_JAR_NAME = CommonVars[String]("wds.linkis.ecp.spark.default.jar", "linkis-engineconn-core-1.0.1.jar")
- val SPARK_DRIVER_CLASSPATH = CommonVars[String]("wds.linkis.spark.driver.extra.class.path", "")
+ val SPARK_DRIVER_CLASSPATH = CommonVars[String]("spark.driver.extraClassPath", "")
val SPARK_DRIVER_EXTRA_JAVA_OPTIONS = CommonVars[String]("spark.driver.extraJavaOptions", "\"-Dwds.linkis.configuration=linkis-engine.properties " + "\"")
diff --git a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/config/SparkResourceConfiguration.scala b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/config/SparkResourceConfiguration.scala
index d0d3057..272a48f 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/config/SparkResourceConfiguration.scala
+++ b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/config/SparkResourceConfiguration.scala
@@ -24,10 +24,10 @@
*/
object SparkResourceConfiguration {
- val LINKIS_SPARK_DRIVER_MEMORY = CommonVars[Int]("spark.driver.memory", 2) //单位为G
- val LINKIS_SPARK_DRIVER_CORES = 1 //Fixed to 1(固定为1) CommonVars[Int]("wds.linkis.driver.cores", 1)
+ val LINKIS_SPARK_DRIVER_MEMORY = CommonVars[String]("spark.driver.memory", "2g")
+ val LINKIS_SPARK_DRIVER_CORES = CommonVars[Int]("spark.driver.cores", 1)
- val LINKIS_SPARK_EXECUTOR_MEMORY = CommonVars[Int]("spark.executor.memory", 4) //单位为G
+ val LINKIS_SPARK_EXECUTOR_MEMORY = CommonVars[String]("spark.executor.memory", "4g")
val LINKIS_SPARK_EXECUTOR_CORES = CommonVars[Int]("spark.executor.cores", 2)
val LINKIS_SPARK_EXECUTOR_INSTANCES = CommonVars[Int]("spark.executor.instances", 3)
val LINKIS_QUEUE_NAME = CommonVars[String]("wds.linkis.rm.yarnqueue", "default")
diff --git a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/configuration/SparkEngineServerSpringConfiguration.scala b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/configuration/SparkEngineServerSpringConfiguration.scala
deleted file mode 100644
index 32927c3..0000000
--- a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/configuration/SparkEngineServerSpringConfiguration.scala
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Copyright 2019 WeBank
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.webank.wedatasphere.linkis.engineplugin.spark.configuration
-
-import com.webank.wedatasphere.linkis.engineconn.common.hook.EngineConnHook
-import com.webank.wedatasphere.linkis.engineplugin.spark.hook.{SparkPythonVersionEngineHook, UserDataBaseHook}
-import org.springframework.context.annotation.{Bean, Configuration}
-
-/**
- *
- */
-@Configuration
-class SparkEngineServerSpringConfiguration {
-
- @Bean(Array("engineHooks"))
-// def createEngineHooks(): Array[EngineConnHook] = Array(new ReleaseEngineHook, new MaxExecuteNumEngineHook,new SparkPythonVersionEngineHook, new JarUdfEngineHook, new PyUdfEngineHook, new ScalaUdfEngineHook, new PyFunctionEngineHook, new ScalaFunctionEngineHook,new UserDataBaseHook)
- def createEngineHooks(): Array[EngineConnHook] = Array(new SparkPythonVersionEngineHook, new UserDataBaseHook)
- // todo check
-}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/executor/SparkEngineConnExecutor.scala b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/executor/SparkEngineConnExecutor.scala
index b65fd4a..b8cbba6 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/executor/SparkEngineConnExecutor.scala
+++ b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/executor/SparkEngineConnExecutor.scala
@@ -18,8 +18,8 @@
import java.util
import java.util.concurrent.atomic.AtomicLong
-
-import com.webank.wedatasphere.linkis.common.utils.{ByteTimeUtils, Utils}
+import com.webank.wedatasphere.linkis.common.log.LogUtils
+import com.webank.wedatasphere.linkis.common.utils.{ByteTimeUtils, Logging, Utils}
import com.webank.wedatasphere.linkis.engineconn.computation.executor.execute.{ComputationExecutor, EngineExecutionContext}
import com.webank.wedatasphere.linkis.engineplugin.spark.common.Kind
import com.webank.wedatasphere.linkis.engineplugin.spark.extension.{SparkPostExecutionHook, SparkPreExecutionHook}
@@ -38,13 +38,10 @@
import scala.collection.mutable.ArrayBuffer
-abstract class SparkEngineConnExecutor(val sc: SparkContext, id: Long) extends ComputationExecutor {
-
- private val LOG = LoggerFactory.getLogger(getClass)
+abstract class SparkEngineConnExecutor(val sc: SparkContext, id: Long) extends ComputationExecutor with Logging{
private var initialized: Boolean = false
-
private var oldprogress: Float = 0f
private var jobGroup: String = _
@@ -58,37 +55,37 @@
private var executorLabels: util.List[Label[_]] = new util.ArrayList[Label[_]]()
override def init(): Unit = {
- LOG.info(s"Ready to change engine state!")
+ info(s"Ready to change engine state!")
// setCodeParser() // todo check
super.init()
- initialized = true
}
override def executeLine(engineExecutorContext: EngineExecutionContext, code: String): ExecuteResponse = Utils.tryFinally {
+ this.engineExecutionContext = engineExecutorContext
if (sc.isStopped) {
error("Spark application has already stopped, please restart it.")
transition(NodeStatus.Failed)
throw new LinkisJobRetryException("Spark application sc has already stopped, please restart it.")
}
- this.engineExecutionContext = engineExecutorContext
oldprogress = 0f
// val runType = engineExecutorContext.getProperties.get("runType").asInstanceOf[String]
- var kind: Kind = getKind
+ val kind: Kind = getKind
var preCode = code
+ engineExecutorContext.appendStdout(LogUtils.generateInfo(s"yarn application id: ${sc.applicationId}"))
//Pre-execution hook
Utils.tryQuietly(SparkPreExecutionHook.getSparkPreExecutionHooks().foreach(hook => preCode = hook.callPreExecutionHook(engineExecutorContext, preCode)))
//Utils.tryAndWarn(CSSparkHelper.setContextIDInfoToSparkConf(engineExecutorContext, sc))
val _code = Kind.getRealCode(preCode)
info(s"Ready to run code with kind $kind.")
- jobGroup = String.valueOf("dwc-spark-mix-code-" + queryNum.incrementAndGet())
+ jobGroup = String.valueOf("linkis-spark-mix-code-" + queryNum.incrementAndGet())
// val executeCount = queryNum.get().toInt - 1
info("Set jobGroup to " + jobGroup)
sc.setJobGroup(jobGroup, _code, true)
val response = Utils.tryFinally(runCode(this, _code, engineExecutorContext, jobGroup)) {
- this.engineExecutionContext.pushProgress(1, getProgressInfo)
+ Utils.tryAndWarn(this.engineExecutionContext.pushProgress(1, getProgressInfo))
jobGroup = null
sc.clearJobGroup()
}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala
index 65f1b9b..a9040ff 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala
+++ b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/executor/SparkPythonExecutor.scala
@@ -18,9 +18,9 @@
import com.webank.wedatasphere.linkis.common.conf.CommonVars
import com.webank.wedatasphere.linkis.common.utils.Utils
-import com.webank.wedatasphere.linkis.engineconn.computation.executor.creation.ComputationExecutorManager
import com.webank.wedatasphere.linkis.engineconn.computation.executor.execute.EngineExecutionContext
import com.webank.wedatasphere.linkis.engineconn.computation.executor.rs.RsOutputStream
+import com.webank.wedatasphere.linkis.engineconn.core.executor.ExecutorManager
import com.webank.wedatasphere.linkis.engineconn.launch.EngineConnServer
import com.webank.wedatasphere.linkis.engineplugin.spark.Interpreter.PythonInterpreter._
import com.webank.wedatasphere.linkis.engineplugin.spark.common.{Kind, PySpark}
@@ -77,7 +77,6 @@
override def init(): Unit = {
-
setCodeParser(new PythonCodeParser)
super.init()
info("spark sql executor start")
@@ -89,7 +88,7 @@
info(s"To close python cli task $taskID")
Utils.tryAndError(close)
info(s"To delete python executor task $taskID")
- Utils.tryAndError(ComputationExecutorManager.getInstance.removeExecutor(getExecutorLabels().asScala.toArray))
+ Utils.tryAndError(ExecutorManager.getInstance.removeExecutor(getExecutorLabels().asScala.toArray))
info(s"Finished to kill python task $taskID")
}
@@ -113,8 +112,8 @@
private def initGateway = {
// 如果从前端获取到用户所设置的Python版本为Python3 则取Python3的环境变量,否则默认为Python2
logger.info(s"spark.python.version => ${engineCreationContext.getOptions.get("spark.python.version")}")
- val userDefinePythonVersion = engineCreationContext.getOptions.get("spark.python.version").toString.toLowerCase()
- val sparkPythonVersion = if(null != userDefinePythonVersion && userDefinePythonVersion.equals("python3")) "python3" else "python"
+ val userDefinePythonVersion = engineCreationContext.getOptions.getOrDefault("spark.python.version", "python").toString.toLowerCase()
+ val sparkPythonVersion = if(StringUtils.isNotBlank(userDefinePythonVersion)) userDefinePythonVersion else "python"
val pythonExec = CommonVars("PYSPARK_DRIVER_PYTHON", sparkPythonVersion).getValue
val pythonScriptPath = CommonVars("python.script.path", "python/mix_pyspark.py").getValue
@@ -133,8 +132,8 @@
pythonClasspath ++= File.pathSeparator ++= files.split(",").filter(_.endsWith(".zip")).mkString(File.pathSeparator)
}
//extra python package
- val pyFiles = sc.getConf.get("spark.application.pyFiles", "")
- logger.info(s"spark.application.pyFiles => ${pyFiles}")
+ val pyFiles = sc.getConf.get("spark.submit.pyFiles", "")
+ logger.info(s"spark.submit.pyFiles => ${pyFiles}")
//add class path zip
val classPath = CommonVars("java.class.path", "").getValue
classPath.split(";").filter(_.endsWith(".zip")).foreach(pythonClasspath ++= File.pathSeparator ++= _)
@@ -149,8 +148,16 @@
val builder = new ProcessBuilder(cmd.toStrings.toSeq.toList.asJava)
val env = builder.environment()
- if(userDefinePythonVersion.equals("python3")){
- env.put("PYSPARK_PYTHON", pythonExec)
+ if (StringUtils.isBlank(sc.getConf.get("spark.pyspark.python", ""))) {
+ info("spark.pyspark.python is null")
+ if (sparkPythonVersion.equals("python3")) {
+ info("userDefinePythonVersion is python3 will be set to PYSPARK_PYTHON")
+ env.put("PYSPARK_PYTHON", pythonExec)
+ }
+ } else {
+ val executorPython = sc.getConf.get("spark.pyspark.python")
+ info(s"set PYSPARK_PYTHON spark.pyspark.python is $executorPython")
+ env.put("PYSPARK_PYTHON", executorPython)
}
env.put("PYTHONPATH", pythonClasspath.toString())
env.put("PYTHONUNBUFFERED", "YES")
diff --git a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/executor/SparkScalaExecutor.scala b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/executor/SparkScalaExecutor.scala
index 72063f6..5d12fe4 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/executor/SparkScalaExecutor.scala
+++ b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/executor/SparkScalaExecutor.scala
@@ -17,11 +17,11 @@
package com.webank.wedatasphere.linkis.engineplugin.spark.executor
import java.io.{BufferedReader, File}
-
import com.webank.wedatasphere.linkis.common.utils.Utils
import com.webank.wedatasphere.linkis.engineconn.computation.executor.creation.ComputationExecutorManager
import com.webank.wedatasphere.linkis.engineconn.computation.executor.execute.EngineExecutionContext
import com.webank.wedatasphere.linkis.engineconn.computation.executor.rs.RsOutputStream
+import com.webank.wedatasphere.linkis.engineconn.core.executor.ExecutorManager
import com.webank.wedatasphere.linkis.engineplugin.spark.common.{Kind, SparkScala}
import com.webank.wedatasphere.linkis.engineplugin.spark.config.SparkConfiguration
import com.webank.wedatasphere.linkis.engineplugin.spark.entity.SparkEngineSession
@@ -157,7 +157,7 @@
val msg = ExceptionUtils.getRootCauseMessage(t)
if (msg.contains("OutOfMemoryError")) {
error("engine oom now to set status to shutdown")
- ComputationExecutorManager.getInstance.getReportExecutor.tryShutdown()
+ ExecutorManager.getInstance.getReportExecutor.tryShutdown()
}
engineExecutionContext.appendStdout("task error info: " + msg)
Results.Error
@@ -243,11 +243,6 @@
sparkILoop.in = reader
sparkILoop.initializeSynchronous()
SparkScalaExecutor.loopPostInit(sparkILoop)
- warn("spark repl has been finished, now stop it.")
- /*Future {
- sparkILoop.process(settings)
- warn("spark repl has been finished, now stop it.")
- }*/
}
protected def getField(obj: Object, name: String): Object = {
diff --git a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/factory/SparkEngineConnFactory.scala b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/factory/SparkEngineConnFactory.scala
index f645a2e..17990fa 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/factory/SparkEngineConnFactory.scala
+++ b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/factory/SparkEngineConnFactory.scala
@@ -43,7 +43,7 @@
override protected def createEngineConnSession(engineCreationContext: EngineCreationContext): Any = {
val options = engineCreationContext.getOptions
val useSparkSubmit = true
- val sparkConf: SparkConf = new SparkConf(true).setAppName(options.getOrDefault("spark.app.name", "EngineConn-Spark"))
+ val sparkConf: SparkConf = new SparkConf(true)
val master = sparkConf.getOption("spark.master").getOrElse(CommonVars("spark.master", "yarn").getValue)
info(s"------ Create new SparkContext {$master} -------")
val pysparkBasePath = SparkConfiguration.SPARK_HOME.getValue
@@ -51,7 +51,7 @@
val pythonLibUris = pysparkPath.listFiles().map(_.toURI.toString).filter(_.endsWith(".zip"))
if (pythonLibUris.length == 2) {
val sparkConfValue1 = Utils.tryQuietly(CommonVars("spark.yarn.dist.files", "").getValue)
- val sparkConfValue2 = Utils.tryQuietly(options.get("spark.yarn.dist.files"))
+ val sparkConfValue2 = Utils.tryQuietly(sparkConf.get("spark.yarn.dist.files"))
if(StringUtils.isEmpty(sparkConfValue1) && StringUtils.isEmpty(sparkConfValue2))
sparkConf.set("spark.yarn.dist.files", pythonLibUris.mkString(","))
else if(StringUtils.isEmpty(sparkConfValue1))
@@ -60,8 +60,8 @@
sparkConf.set("spark.yarn.dist.files", sparkConfValue1 + "," + pythonLibUris.mkString(","))
else
sparkConf.set("spark.yarn.dist.files", sparkConfValue1 + "," + sparkConfValue2 + "," + pythonLibUris.mkString(","))
- if (!useSparkSubmit) sparkConf.set("spark.files", sparkConf.get("spark.yarn.dist.files"))
- sparkConf.set("spark.submit.pyFiles", pythonLibUris.mkString(","))
+// if (!useSparkSubmit) sparkConf.set("spark.files", sparkConf.get("spark.yarn.dist.files"))
+// sparkConf.set("spark.submit.pyFiles", pythonLibUris.mkString(","))
}
// Distributes needed libraries to workers
// when spark version is greater than or equal to 1.5.0
diff --git a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/factory/SparkEngineConnResourceFactory.scala b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/factory/SparkEngineConnResourceFactory.scala
index 9d86368..f8b52c4 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/factory/SparkEngineConnResourceFactory.scala
+++ b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/factory/SparkEngineConnResourceFactory.scala
@@ -22,20 +22,30 @@
import com.webank.wedatasphere.linkis.engineplugin.spark.config.SparkResourceConfiguration._
import com.webank.wedatasphere.linkis.manager.common.entity.resource.{DriverAndYarnResource, LoadInstanceResource, Resource, YarnResource}
import com.webank.wedatasphere.linkis.manager.engineplugin.common.resource.AbstractEngineResourceFactory
+import org.apache.commons.lang.StringUtils
class SparkEngineConnResourceFactory extends AbstractEngineResourceFactory with Logging {
override protected def getRequestResource(properties: util.Map[String, String]): Resource = {
val executorNum = LINKIS_SPARK_EXECUTOR_INSTANCES.getValue(properties)
+ val executorMemory = LINKIS_SPARK_EXECUTOR_MEMORY.getValue(properties)
+ val executorMemoryWithUnit = if (StringUtils.isNumeric(executorMemory)) {
+ executorMemory + "g"
+ } else {
+ executorMemory
+ }
+ val driverMemory = LINKIS_SPARK_DRIVER_MEMORY.getValue(properties)
+ val driverMemoryWithUnit = if (StringUtils.isNumeric(driverMemory)) {
+ driverMemory + "g"
+ } else {
+ driverMemory
+ }
+ val driverCores = LINKIS_SPARK_DRIVER_CORES.getValue(properties)
new DriverAndYarnResource(
- new LoadInstanceResource(ByteTimeUtils.byteStringAsBytes(LINKIS_SPARK_DRIVER_MEMORY.getValue(properties) + "G"),
- LINKIS_SPARK_DRIVER_CORES,
- 1),
- new YarnResource(ByteTimeUtils.byteStringAsBytes(LINKIS_SPARK_EXECUTOR_MEMORY.getValue(properties) * executorNum + "G"),
- LINKIS_SPARK_EXECUTOR_CORES.getValue(properties) * executorNum,
- 0,
- LINKIS_QUEUE_NAME.getValue(properties))
+ new LoadInstanceResource(ByteTimeUtils.byteStringAsBytes(driverMemoryWithUnit), driverCores, 1),
+ new YarnResource(ByteTimeUtils.byteStringAsBytes(executorMemoryWithUnit) * executorNum,
+ LINKIS_SPARK_EXECUTOR_CORES.getValue(properties) * executorNum, 0, LINKIS_QUEUE_NAME.getValue(properties))
)
}
}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/hook/SparkPythonVersionEngineHook.scala b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/hook/SparkPythonVersionEngineHook.scala
deleted file mode 100644
index 34f4396..0000000
--- a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/hook/SparkPythonVersionEngineHook.scala
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright 2019 WeBank
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.webank.wedatasphere.linkis.engineplugin.spark.hook
-
-import com.webank.wedatasphere.linkis.common.utils.Logging
-import com.webank.wedatasphere.linkis.engineconn.common.creation.EngineCreationContext
-import com.webank.wedatasphere.linkis.engineconn.common.engineconn.EngineConn
-import com.webank.wedatasphere.linkis.engineconn.common.hook.EngineConnHook
-
-/**
- *
- */
-class SparkPythonVersionEngineHook extends EngineConnHook with Logging{
- var _sparkpythonVersion : String = _
- var _sparkpythonExtraPackage : String = _
- var _pythonVersion : String = _
- var _pythonExtraPackage:String = _
- override def beforeCreateEngineConn(engineCreationContext: EngineCreationContext): Unit = {
- val params = engineCreationContext.getOptions
- _sparkpythonVersion = params.getOrDefault("spark.python.version","python").toString
- _sparkpythonExtraPackage = params.getOrDefault("spark.application.pyFiles","file:///mnt/bdap/test/test/test.zip").toString
- logger.info(s"spark python version => ${_sparkpythonVersion}")
- logger.info(s"spark python Extra Package => ${_sparkpythonExtraPackage}")
- params
- }
-
- override def beforeExecutionExecute(engineCreationContext: EngineCreationContext, engineConn: EngineConn): Unit = {
- info("use python3 execute print cmd hello. check todo")
- /*executor.execute(new ExecuteRequest with RunTypeExecuteRequest with PythonExecuteRequest{
- override val code: String = "print('hello')"
- override val runType: String = "python"
- override val sparkPythonVersion: String = _sparkpythonVersion
- override val sparkPythonExtraPackage: String = _sparkpythonExtraPackage
- override val pythonVersion: String = _pythonVersion
- override val pythonExtraPackage: String = _pythonExtraPackage
- })*/
- }
-
- override def afterExecutionExecute(engineCreationContext: EngineCreationContext, engineConn: EngineConn): Unit = {}
-}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/hook/UserDataBaseHook.scala b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/hook/UserDataBaseHook.scala
deleted file mode 100644
index 2d8fe7c..0000000
--- a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/hook/UserDataBaseHook.scala
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright 2019 WeBank
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.webank.wedatasphere.linkis.engineplugin.spark.hook
-
-import com.webank.wedatasphere.linkis.common.utils.Logging
-import com.webank.wedatasphere.linkis.engineconn.common.creation.EngineCreationContext
-import com.webank.wedatasphere.linkis.engineconn.common.engineconn.EngineConn
-import com.webank.wedatasphere.linkis.engineconn.common.hook.EngineConnHook
-import com.webank.wedatasphere.linkis.engineplugin.spark.exception.EngineErrorException
-
-/**
- *
- */
-class UserDataBaseHook extends EngineConnHook with Logging {
- self =>
- protected var creator: String = _
- protected var user: String = _
- protected var initSpecialCode: String = _
- val runType = "sql"
-
- @scala.throws[EngineErrorException]
- override def beforeCreateEngineConn(engineCreationContext: EngineCreationContext): Unit = {
- val params = engineCreationContext.getOptions
- creator = params.get("creator")
- user = params.get("user")
- initSpecialCode = "use " + user + "_ind;\n"
- }
-
- @scala.throws[EngineErrorException]
- override def beforeExecutionExecute(engineCreationContext: EngineCreationContext, engineConn: EngineConn): Unit = {
-
- }
-
- override def afterExecutionExecute(engineCreationContext: EngineCreationContext, engineConn: EngineConn): Unit = {
- /*info("Set default database for user , code: " + initSpecialCode)
- executor.execute(new ExecuteRequest with RunTypeExecuteRequest {
- override val code: String = initSpecialCode
- override val runType: String = self.runType
- })
- info("executed code: " + initSpecialCode)*/
- }
-
-}
diff --git a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/launch/SparkSubmitProcessEngineConnLaunchBuilder.scala b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/launch/SparkSubmitProcessEngineConnLaunchBuilder.scala
index c749630..52957ea 100644
--- a/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/launch/SparkSubmitProcessEngineConnLaunchBuilder.scala
+++ b/linkis-engineconn-plugins/engineconn-plugins/spark/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/spark/launch/SparkSubmitProcessEngineConnLaunchBuilder.scala
@@ -18,10 +18,11 @@
import java.lang.ProcessBuilder.Redirect
import java.util
-
import com.google.common.collect.Lists
+import com.webank.wedatasphere.linkis.common.conf.CommonVars
+import com.webank.wedatasphere.linkis.engineplugin.spark.config.SparkResourceConfiguration.LINKIS_SPARK_DRIVER_MEMORY
import com.webank.wedatasphere.linkis.engineplugin.spark.config.{SparkConfiguration, SparkResourceConfiguration}
-import com.webank.wedatasphere.linkis.engineplugin.spark.launch.SparkSubmitProcessEngineConnLaunchBuilder.{AbsolutePath, Path, RelativePath}
+import com.webank.wedatasphere.linkis.engineplugin.spark.launch.SparkSubmitProcessEngineConnLaunchBuilder.{AbsolutePath, Path, RelativePath, getValueAndRemove}
import com.webank.wedatasphere.linkis.manager.common.entity.resource.{DriverAndYarnResource, NodeResource}
import com.webank.wedatasphere.linkis.manager.engineplugin.common.conf.EnvConfiguration
import com.webank.wedatasphere.linkis.manager.engineplugin.common.launch.entity.EngineConnBuildRequest
@@ -163,24 +164,33 @@
addOpt("--deploy-mode", _deployMode)
addOpt("--name", _name)
//addOpt("--jars",Some(ENGINEMANAGER_JAR.getValue))
- info("No need to add jars for " + _jars.map(fromPath).exists(x => x.equals("hdfs:///")).toString())
- addList("--jars", _jars.map(fromPath).filter(x => x.contains("hdfs:///") != true))
+// info("No need to add jars for " + _jars.map(fromPath).exists(x => x.equals("hdfs:///")).toString())
+ _jars = _jars.filter(_.isNotBlankPath())
- if (_pyFiles.map(fromPath).exists(x => x.equals("hdfs:///")) != true) {
- addList("--py-files", _pyFiles.map(fromPath).map(_.toString))
+ if(_jars.nonEmpty) {
+ addList("--jars", _jars.map(fromPath))
}
- if (_files.map(fromPath).exists(x => x.equals("hdfs:///")) != true) {
+
+ _pyFiles = _pyFiles.filter(_.isNotBlankPath())
+ if(_pyFiles.nonEmpty) {
+ addList("--py-files", _pyFiles.map(fromPath))
+ }
+
+ _files = _files.filter(_.isNotBlankPath())
+ if(_files.nonEmpty) {
addList("--files", _files.map(fromPath))
}
+
+ _archives = _archives.filter(_.isNotBlankPath())
+ if(_archives.nonEmpty) {
+ addList("--archives", _archives.map(fromPath))
+ }
_conf.foreach {
case (key, value) =>
if (key.startsWith("spark.")) {
// subcommand cannot be quoted by double quote, use single quote instead
addOpt("--conf", Some(key + "=\"" + value + "\""))
}
- else if (key.startsWith("hive.")) {
- addOpt("--hiveconf", Some(key + "=\"" + value + "\""))
- }
}
addOpt("--driver-memory", _driverMemory)
addClasspath("--driver-class-path", _driverClassPath)
@@ -192,14 +202,14 @@
addOpt("--class", _className)
addOpt("1>", Some(s"${variable(LOG_DIRS)}/stdout"))
- addOpt("2>", Some(s"${variable(LOG_DIRS)}/stderr"))
+ addOpt("2>>", Some(s"${variable(LOG_DIRS)}/stderr"))
addOpt("", Some(s" ${variable(PWD)}/lib/${SparkConfiguration.ENGINE_JAR.getValue}"))
- commandLine.toArray
+ commandLine.toArray.filter(StringUtils.isNotEmpty)
}
- override def isAddSparkConfig = true
+ override def enablePublicModule = true
def master(masterUrl: String): SparkSubmitProcessEngineConnLaunchBuilder = {
_master = Some(masterUrl)
@@ -327,26 +337,34 @@
}
this.conf(SparkConfiguration.SPARK_DRIVER_EXTRA_JAVA_OPTIONS.key, driverJavaSet.toString())
//this.conf("spark.sql.extensions", "com.webank.wedatasphere.linkis.hook.spark.extension.SparkHistoryExtension")
- this.name(properties.getOrDefault("appName", "linkis"))
- this.className(properties.getOrDefault("className", getMainClass))
- properties.getOrDefault("archives", "").toString.split(",").map(RelativePath).foreach(this.archive)
- this.driverCores(SparkResourceConfiguration.LINKIS_SPARK_DRIVER_CORES)
- this.driverMemory(SparkResourceConfiguration.LINKIS_SPARK_DRIVER_MEMORY.getValue(properties) + "G")
- this.executorCores(SparkResourceConfiguration.LINKIS_SPARK_EXECUTOR_CORES.getValue(properties))
- this.executorMemory(SparkResourceConfiguration.LINKIS_SPARK_EXECUTOR_MEMORY.getValue(properties) + "G")
- this.numExecutors(SparkResourceConfiguration.LINKIS_SPARK_EXECUTOR_INSTANCES.getValue(properties))
- properties.getOrDefault("files", "").split(",").map(RelativePath).foreach(file)
- properties.getOrDefault("jars", "").split(",").map(RelativePath).foreach(jar)
- val defaultExternalJars = if (StringUtils.isNotBlank(properties.get("spark.external.jars"))) {
- properties.get("spark.external.jars")
+ this.className(getValueAndRemove(properties,"className", getMainClass))
+
+ getValueAndRemove(properties,"archives", "").toString.split(",").map(AbsolutePath).foreach(this.archive)
+ this.driverCores(getValueAndRemove(properties, SparkResourceConfiguration.LINKIS_SPARK_DRIVER_CORES))
+ val driverMemory = getValueAndRemove(properties, LINKIS_SPARK_DRIVER_MEMORY)
+ val driverMemoryWithUnit = if (StringUtils.isNumeric(driverMemory)) {
+ driverMemory + "g"
} else {
- SparkConfiguration.SPARK_DEFAULT_EXTERNAL_JARS_PATH.getValue.toString
+ driverMemory
}
+ this.driverMemory(driverMemoryWithUnit)
+ this.executorCores(getValueAndRemove(properties, SparkResourceConfiguration.LINKIS_SPARK_EXECUTOR_CORES))
+ val executorMemory = getValueAndRemove(properties, SparkResourceConfiguration.LINKIS_SPARK_EXECUTOR_MEMORY)
+ val executorMemoryWithUnit = if (StringUtils.isNumeric(executorMemory)) {
+ executorMemory + "g"
+ } else {
+ executorMemory
+ }
+ this.executorMemory(executorMemoryWithUnit)
+ this.numExecutors(getValueAndRemove(properties, SparkResourceConfiguration.LINKIS_SPARK_EXECUTOR_INSTANCES))
+ getValueAndRemove(properties,"files", "").split(",").map(AbsolutePath).foreach(file)
+ getValueAndRemove(properties,"jars", "").split(",").map(AbsolutePath).foreach(jar)
+ val defaultExternalJars = getValueAndRemove(properties, SparkConfiguration.SPARK_DEFAULT_EXTERNAL_JARS_PATH)
defaultExternalJars.split(",").map(AbsolutePath).filter(x => {
val file = new java.io.File(x.path)
file.isFile
}).foreach(jar)
- proxyUser(properties.getOrDefault("proxyUser", ""))
+ proxyUser(getValueAndRemove(properties, "proxyUser", ""))
if (null != darResource) {
this.queue(darResource.yarnResource.queueName)
} else {
@@ -354,7 +372,7 @@
}
- this.driverClassPath(SparkConfiguration.SPARK_DRIVER_CLASSPATH.getValue)
+ this.driverClassPath(getValueAndRemove(properties, SparkConfiguration.SPARK_DRIVER_CLASSPATH))
this.driverClassPath(variable(CLASSPATH))
this.redirectOutput(Redirect.PIPE)
this.redirectErrorStream(true)
@@ -369,21 +387,24 @@
}
}
}
- this.env("spark.app.name", properties.getOrDefault("appName", "linkis" + this._userWithCreator.creator))
-
+ //deal spark conf and spark.hadoop.*
+ val iterator = properties.entrySet().iterator()
+ val sparkConfKeys = ArrayBuffer[String]()
+ while (iterator.hasNext) {
+ val keyValue = iterator.next()
+ if (!SparkConfiguration.SPARK_PYTHON_VERSION.key.equals(keyValue.getKey) && keyValue.getKey.startsWith("spark.") && StringUtils.isNotBlank(keyValue.getValue)) {
+ conf(keyValue.getKey, keyValue.getValue)
+ sparkConfKeys += keyValue.getKey
+ }
+ }
+ this.name(getValueAndRemove(properties, SparkConfiguration.SPARK_APP_NAME ) + "_" +this._userWithCreator.creator)
+ sparkConfKeys.foreach(properties.remove(_))
}
private def fromPath(path: Path): String = path match {
case AbsolutePath(p) => p
- case RelativePath(p) =>
- if (p.startsWith("hdfs://")) {
- p
- } else if (p.startsWith("file://")) {
- p
- } else {
- fsRoot + "/" + p
- }
+ case RelativePath(p) => p
}
override protected def getEngineConnManagerHooks(implicit engineConnBuildRequest: EngineConnBuildRequest): util.List[String] = {
@@ -398,10 +419,35 @@
new SparkSubmitProcessEngineConnLaunchBuilder
}
- sealed trait Path
+ sealed trait Path {
- case class AbsolutePath(path: String) extends Path
+ def isNotBlankPath(): Boolean;
- case class RelativePath(path: String) extends Path
+ protected def isNotBlankPath(path: String): Boolean = {
+ StringUtils.isNotBlank(path) && !"/".equals(path.trim) && !"hdfs:///".equals(path.trim) && !"file:///".equals(path.trim)
+ }
+ }
+
+ case class AbsolutePath(path: String) extends Path {
+ override def isNotBlankPath(): Boolean = isNotBlankPath(path)
+ }
+
+ case class RelativePath(path: String) extends Path {
+ override def isNotBlankPath(): Boolean = isNotBlankPath(path)
+ }
+
+ def getValueAndRemove[T](properties: java.util.Map[String, String], commonVars: CommonVars[T]): T = {
+ val value = commonVars.getValue(properties)
+ properties.remove(commonVars.key)
+ value
+ }
+
+ def getValueAndRemove(properties: java.util.Map[String, String], key: String, defaultValue: String): String = {
+ if (properties.containsKey(key)) {
+ properties.remove(key)
+ } else {
+ defaultValue
+ }
+ }
}
\ No newline at end of file
diff --git a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-cache/pom.xml b/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-cache/pom.xml
index 85b0065..d4269f2 100644
--- a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-cache/pom.xml
+++ b/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-cache/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<!-- <relativePath>../../../pom.xml</relativePath>-->
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/pom.xml b/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/pom.xml
index 743d0ef..728bd98 100644
--- a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/pom.xml
+++ b/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<!--<relativePath>../../pom.xml</relativePath>-->
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/common/conf/EnvConfiguration.scala b/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/common/conf/EnvConfiguration.scala
index f9673f3..f88a711 100644
--- a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/common/conf/EnvConfiguration.scala
+++ b/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/common/conf/EnvConfiguration.scala
@@ -27,9 +27,10 @@
val HADOOP_CONF_DIR = CommonVars[String]("hadoop.config.dir", CommonVars[String]("HADOOP_CONF_DIR", "/appcom/config/hadoop-config").getValue)
- val ENGINE_CONN_JARS = CommonVars("wds.linkis.engineConn.jars", "", "extra jars for engineConn (engineConn额外的Jars)")
- val ENGINE_CONN_CLASSPATH_FILES = CommonVars("wds.linkis.engineConn.files", "", "extra configuration for engineConn (engineConn额外的配置文件)")
+ val ENGINE_CONN_JARS = CommonVars("wds.linkis.engineConn.jars", "", "engineConn额外的Jars")
+
+ val ENGINE_CONN_CLASSPATH_FILES = CommonVars("wds.linkis.engineConn.files", "", "engineConn额外的配置文件")
val ENGINE_CONN_DEFAULT_JAVA_OPTS = CommonVars[String]("wds.linkis.engineConn.javaOpts.default", s"-XX:+UseG1GC -XX:MaxPermSize=250m -XX:PermSize=128m " +
s"-Xloggc:%s -XX:+PrintGCDetails -XX:+PrintGCTimeStamps -XX:+PrintGCDateStamps -Dwds.linkis.configuration=linkis-engineconn.properties -Dwds.linkis.gateway.url=${Configuration.getGateWayURL()}")
@@ -48,5 +49,4 @@
val LOG4J2_XML_FILE = CommonVars[String]("wds.linkis.engineconn.log4j2.xml.file", "log4j2-engineconn.xml")
val LINKIS_PUBLIC_MODULE_PATH = CommonVars("wds.linkis.public_module.path", Configuration.LINKIS_HOME.getValue + "/lib/linkis-commons/public-module")
-
}
diff --git a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/common/launch/process/JavaProcessEngineConnLaunchBuilder.scala b/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/common/launch/process/JavaProcessEngineConnLaunchBuilder.scala
index e9b3541..14aa87b 100644
--- a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/common/launch/process/JavaProcessEngineConnLaunchBuilder.scala
+++ b/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/common/launch/process/JavaProcessEngineConnLaunchBuilder.scala
@@ -53,7 +53,7 @@
val commandLine: ArrayBuffer[String] = ArrayBuffer[String]()
commandLine += (variable(JAVA_HOME) + "/bin/java")
commandLine += "-server"
- val engineConnMemory = EnvConfiguration.ENGINE_CONN_MEMORY.getValue(engineConnBuildRequest.engineConnCreationDesc.properties).toString
+ val engineConnMemory = EngineConnPluginConf.JAVA_ENGINE_REQUEST_MEMORY.getValue.toString
commandLine += ("-Xmx" + engineConnMemory)
commandLine += ("-Xms" + engineConnMemory)
val javaOPTS = getExtractJavaOpts
@@ -68,7 +68,7 @@
commandLine += "-cp"
commandLine += variable(CLASSPATH)
commandLine += getMainClass
- commandLine ++= Seq("1>", s"${variable(LOG_DIRS)}/stdout", "2>", s"${variable(LOG_DIRS)}/stderr")
+ commandLine ++= Seq("1>", s"${variable(LOG_DIRS)}/stdout", "2>>", s"${variable(LOG_DIRS)}/stderr")
commandLine.toArray
}
@@ -87,7 +87,7 @@
// second, add engineconn libs.
addPathToClassPath(environment, Seq(variable(PWD), ENGINE_CONN_LIB_DIR_NAME + "/*"))
// then, add public modules.
- if (!isAddSparkConfig) {
+ if (!enablePublicModule) {
addPathToClassPath(environment, Seq(LINKIS_PUBLIC_MODULE_PATH.getValue + "/*"))
}
// finally, add the suitable properties key to classpath
@@ -124,7 +124,7 @@
protected def ifAddHiveConfigPath: Boolean = false
- protected def isAddSparkConfig: Boolean = false
+ protected def enablePublicModule: Boolean = false
override protected def getBmlResources(implicit engineConnBuildRequest: EngineConnBuildRequest): util.List[BmlResource] = {
val engineType = engineConnBuildRequest.labels.find(_.isInstanceOf[EngineTypeLabel])
diff --git a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/common/resource/UserNodeResource.scala b/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/common/resource/UserNodeResource.scala
index a581606..f79f4a0 100644
--- a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/common/resource/UserNodeResource.scala
+++ b/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-core/src/main/scala/com/webank/wedatasphere/linkis/manager/engineplugin/common/resource/UserNodeResource.scala
@@ -16,6 +16,8 @@
package com.webank.wedatasphere.linkis.manager.engineplugin.common.resource
+import java.util.Date
+
import com.webank.wedatasphere.linkis.manager.common.entity.resource.{NodeResource, Resource, ResourceType}
@@ -29,6 +31,9 @@
private var usedResource: Resource = _
private var lockedResource: Resource = _
private var leftResource: Resource = _
+ private var createTime: Date = _
+ private var updateTime: Date = _
+
def getUser = user
@@ -61,4 +66,20 @@
override def setLeftResource(leftResource: Resource): Unit = this.leftResource = leftResource
override def getLeftResource: Resource = this.leftResource
+
+ override def setCreateTime(createTime: Date): Unit = {
+ this.createTime = createTime
+ }
+
+ override def getCreateTime: Date = {
+ createTime
+ }
+
+ override def getUpdateTime: Date = {
+ updateTime
+ }
+
+ override def setUpdateTime(updateTime: Date): Unit = {
+ this.updateTime = updateTime
+ }
}
diff --git a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-loader/pom.xml b/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-loader/pom.xml
index f44c897..bcc6b58 100644
--- a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-loader/pom.xml
+++ b/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-loader/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<!--<relativePath>../../pom.xml</relativePath>-->
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-server/pom.xml b/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-server/pom.xml
index 1240d42..cc6c442 100644
--- a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-server/pom.xml
+++ b/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-server/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-server/src/main/assembly/distribution.xml b/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-server/src/main/assembly/distribution.xml
index d369c45..2327c84 100644
--- a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-server/src/main/assembly/distribution.xml
+++ b/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-server/src/main/assembly/distribution.xml
@@ -22,6 +22,7 @@
<id>linkis-application-manager</id>
<formats>
<format>dir</format>
+ <format>zip</format>
</formats>
<includeBaseDirectory>false</includeBaseDirectory>
<baseDirectory>linkis-engineconn-plugin-server</baseDirectory>
@@ -42,31 +43,7 @@
</dependencySets>
<fileSets>
- <fileSet>
- <directory>${basedir}/src/main/resources</directory>
- <includes>
- <include>*</include>
- </includes>
- <fileMode>0777</fileMode>
- <outputDirectory>conf</outputDirectory>
- <lineEnding>unix</lineEnding>
- </fileSet>
- <!--<fileSet>
- <directory>${basedir}/bin</directory>
- <includes>
- <include>*</include>
- </includes>
- <fileMode>0777</fileMode>
- <outputDirectory>bin</outputDirectory>
- <lineEnding>unix</lineEnding>
- </fileSet>
- <fileSet>
- <directory>.</directory>
- <excludes>
- <exclude>*/**</exclude>
- </excludes>
- <outputDirectory>logs</outputDirectory>
- </fileSet> -->
+
</fileSets>
</assembly>
diff --git a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-server/src/main/resources/application.yml b/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-server/src/main/resources/application.yml
deleted file mode 100644
index 8cae100..0000000
--- a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-server/src/main/resources/application.yml
+++ /dev/null
@@ -1,29 +0,0 @@
-server:
- port: 9103
-spring:
- application:
- name: linkis-cg-engineplugin
-
-
-eureka:
- client:
- serviceUrl:
- defaultZone: http://127.0.0.1:20303/eureka/
- instance:
- metadata-map:
- test: wedatasphere
-
-management:
- endpoints:
- web:
- exposure:
- include: refresh,info
- health:
- db:
- enabled: false
-logging:
- config: classpath:log4j2.xml
-
-ribbon:
- ReadTimeout: 60000
- ConnectTimeout: 60000
\ No newline at end of file
diff --git a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-server/src/main/resources/linkis-server.properties b/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-server/src/main/resources/linkis-server.properties
deleted file mode 100644
index 954571e..0000000
--- a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-server/src/main/resources/linkis-server.properties
+++ /dev/null
@@ -1,22 +0,0 @@
-#
-# Copyright 2019 WeBank
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-wds.linkis.engineconn.debug.enable=true
-##mybatis
-wds.linkis.server.mybatis.mapperLocations=classpath:com/webank/wedatasphere/linkis/engineplugin/server/dao/impl/*.xml
-wds.linkis.server.mybatis.typeAliasesPackage=
-wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.linkis.engineplugin.server.dao
-wds.linkis.engineConn.plugin.cache.expire-in-seconds=100000
-wds.linkis.engineConn.dist.load.enable=true
diff --git a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-server/src/main/resources/log4j2.xml b/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-server/src/main/resources/log4j2.xml
deleted file mode 100644
index 0f19fc3..0000000
--- a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-server/src/main/resources/log4j2.xml
+++ /dev/null
@@ -1,33 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ~ Copyright 2019 WeBank
- ~
- ~ Licensed under the Apache License, Version 2.0 (the "License");
- ~ you may not use this file except in compliance with the License.
- ~ You may obtain a copy of the License at
- ~
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-
-<configuration status="error" monitorInterval="30">
- <appenders>
- <RollingFile name="RollingFile" fileName="${env:SERVER_LOG_PATH}/linkis.log"
- filePattern="${env:SERVER_LOG_PATH}/$${date:yyyy-MM}/linkis-log-%d{yyyy-MM-dd}-%i.log">
- <PatternLayout pattern="%d{yyyy-MM-dd HH:mm:ss.SSS} [%-5level] [%-40t] %c{1.} (%L) [%M] - %msg%xEx%n"/>
- <SizeBasedTriggeringPolicy size="100MB"/>
- <DefaultRolloverStrategy max="10"/>
- </RollingFile>
- </appenders>
- <loggers>
- <root level="INFO">
- <appender-ref ref="RollingFile"/>
- </root>
- </loggers>
-</configuration>
-
diff --git a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-server/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/server/service/DefaultEngineConnLaunchService.scala b/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-server/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/server/service/DefaultEngineConnLaunchService.scala
index d33983c..ac0c579 100644
--- a/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-server/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/server/service/DefaultEngineConnLaunchService.scala
+++ b/linkis-engineconn-plugins/linkis-engineconn-plugin-framework/linkis-engineconn-plugin-server/src/main/scala/com/webank/wedatasphere/linkis/engineplugin/server/service/DefaultEngineConnLaunchService.scala
@@ -16,7 +16,7 @@
package com.webank.wedatasphere.linkis.engineplugin.server.service
-import com.webank.wedatasphere.linkis.common.utils.Logging
+import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
import com.webank.wedatasphere.linkis.engineplugin.server.loader.EngineConnPluginsLoader
import com.webank.wedatasphere.linkis.manager.engineplugin.common.exception.EngineConnPluginErrorException
import com.webank.wedatasphere.linkis.manager.engineplugin.common.launch.EngineConnLaunchBuilder
@@ -24,6 +24,7 @@
import com.webank.wedatasphere.linkis.manager.engineplugin.common.launch.process.{EngineConnResourceGenerator, JavaProcessEngineConnLaunchBuilder}
import com.webank.wedatasphere.linkis.manager.label.entity.engine.EngineTypeLabel
import com.webank.wedatasphere.linkis.message.annotation.Receiver
+import org.apache.commons.lang.exception.ExceptionUtils
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Component
@@ -51,7 +52,10 @@
val engineTypeOption = engineBuildRequest.labels.find(_.isInstanceOf[EngineTypeLabel])
if (engineTypeOption.isDefined) {
val engineTypeLabel = engineTypeOption.get.asInstanceOf[EngineTypeLabel]
- getEngineLaunchBuilder(engineTypeLabel).buildEngineConn(engineBuildRequest)
+ Utils.tryCatch(getEngineLaunchBuilder(engineTypeLabel).buildEngineConn(engineBuildRequest)){ t =>
+ error(s"Failed to createEngineConnLaunchRequest(${engineBuildRequest.ticketId})", t)
+ throw new EngineConnPluginErrorException(10001, s"Failed to createEngineConnLaunchRequest, ${ExceptionUtils.getRootCauseMessage(t)}")
+ }
} else {
throw new EngineConnPluginErrorException(10001, "EngineTypeLabel are requested")
}
diff --git a/linkis-engineconn-plugins/pom.xml b/linkis-engineconn-plugins/pom.xml
index cc919b0..56655f5 100644
--- a/linkis-engineconn-plugins/pom.xml
+++ b/linkis-engineconn-plugins/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-extensions/linkis-io-file-client/pom.xml b/linkis-extensions/linkis-io-file-client/pom.xml
index 46fc67f..f8fe248 100644
--- a/linkis-extensions/linkis-io-file-client/pom.xml
+++ b/linkis-extensions/linkis-io-file-client/pom.xml
@@ -17,7 +17,7 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<groupId>com.webank.wedatasphere.linkis</groupId>
<artifactId>linkis</artifactId>
<relativePath>../../pom.xml</relativePath>
diff --git a/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/client/DefaultIOClient.scala b/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/client/DefaultIOClient.scala
index 90b5414..96f8ae0 100644
--- a/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/client/DefaultIOClient.scala
+++ b/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/client/DefaultIOClient.scala
@@ -19,17 +19,20 @@
import java.lang.reflect.UndeclaredThrowableException
import java.util
-import com.webank.wedatasphere.linkis.common.utils.Logging
+import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
+import com.webank.wedatasphere.linkis.manager.label.entity.Label
import com.webank.wedatasphere.linkis.storage.io.orchestrator.IOFileOrchestratorFactory
import com.webank.wedatasphere.linkis.storage.io.utils.IOClientUtils
import com.webank.wedatasphere.linkis.manager.label.entity.engine.EngineType
import com.webank.wedatasphere.linkis.manager.label.entity.entrance.BindEngineLabel
import com.webank.wedatasphere.linkis.manager.label.utils.{EngineTypeLabelCreator, LabelUtil}
+import com.webank.wedatasphere.linkis.orchestrator.ecm.conf.ECMPluginConf
import com.webank.wedatasphere.linkis.orchestrator.execution.{ArrayResultSetTaskResponse, FailedTaskResponse, SucceedTaskResponse}
import com.webank.wedatasphere.linkis.rpc.exception.NoInstanceExistsException
import com.webank.wedatasphere.linkis.server.BDPJettyServerHelper
import com.webank.wedatasphere.linkis.storage.domain.MethodEntity
-import com.webank.wedatasphere.linkis.storage.exception.{FSNotInitException, StorageErrorException}
+import com.webank.wedatasphere.linkis.storage.exception.{FSNotInitException, StorageErrorCode, StorageErrorException}
+import com.webank.wedatasphere.linkis.storage.io.conf.IOFileClientConf
import com.webank.wedatasphere.linkis.storage.io.orchestrator.IOFileOrchestratorFactory
import com.webank.wedatasphere.linkis.storage.io.utils.IOClientUtils
import org.springframework.stereotype.Component
@@ -40,6 +43,12 @@
private val loadBalanceLabel = IOClientUtils.getDefaultLoadBalanceLabel
+ private val extraLabels: Array[Label[_]] = Utils.tryCatch(IOClientUtils.getExtraLabels()){
+ case throwable: Throwable =>
+ error("Failed to create extraLabels, No extra labels will be used", throwable)
+ Array.empty[Label[_]]
+ }
+
override def execute(user: String, methodEntity: MethodEntity, bindEngineLabel: BindEngineLabel): String = {
val params = new util.HashMap[String, Any]()
if (null != bindEngineLabel) {
@@ -52,14 +61,15 @@
val engineTypeLabel = EngineTypeLabelCreator.createEngineTypeLabel(EngineType.mapFsTypeToEngineType(methodEntity.fsType))
IOClientUtils.addLabelToParams(loadBalanceLabel, params)
IOClientUtils.addLabelToParams(engineTypeLabel, params)
+ extraLabels.foreach(label => IOClientUtils.addLabelToParams(label, params))
val startTime = System.currentTimeMillis()
val jobReq = IOClientUtils.buildJobReq(user, methodEntity, params)
if (null == jobReq) {
- throw new StorageErrorException(52004, s"Job with id ${jobReq.getId} failed to execute method fs for user : ${user}, code : $methodEntity, Because jobReq is null")
+ throw new StorageErrorException(IOFileClientConf.IO_EXECUTE_FAILED_CODE, s"Job with id ${jobReq.getId} failed to execute method fs for user : ${user}, code : $methodEntity, Because jobReq is null")
}
val bindEngineLabel = LabelUtil.getBindEngineLabel(jobReq.getLabels)
if (null == bindEngineLabel) {
- throw new StorageErrorException(52004, s"Job with id ${jobReq.getId} failed to execute method fs for user : ${user}, code : $methodEntity, Because bindEngineLabel is null")
+ throw new StorageErrorException(IOFileClientConf.IO_EXECUTE_FAILED_CODE, s"Job with id ${jobReq.getId} failed to execute method fs for user : ${user}, code : $methodEntity, Because bindEngineLabel is null")
}
val orchestration = IOFileOrchestratorFactory.getOrchestratorSession().orchestrate(jobReq)
@@ -68,7 +78,9 @@
var initCount = 0
while (!response.isInstanceOf[SucceedTaskResponse] && initCount < retryLimit) {
initCount += 1
- response = orchestration.execute()
+ info(s"JobId ${jobReq.getId} execute method ${methodEntity} failed, to retry $initCount")
+ val reTryOrchestration = IOFileOrchestratorFactory.getOrchestratorSession().orchestrate(jobReq)
+ response = reTryOrchestration.execute()
}
val result: String = response match {
case succeedResponse: SucceedTaskResponse =>
@@ -89,18 +101,14 @@
case failedResponse: FailedTaskResponse =>
val msg = s"IO_FILE job: ${jobReq.getId} failed to execute code : ${methodEntity}, reason : ${failedResponse.getErrorMsg}."
info(msg)
- failedResponse.getCause match {
- case e: UndeclaredThrowableException =>
- if (e.getUndeclaredThrowable.isInstanceOf[NoInstanceExistsException]) {
+ if (failedResponse.getErrorMsg.contains(StorageErrorCode.FS_NOT_INIT.getMessage) || failedResponse.getErrorMsg.contains(ECMPluginConf.ECM_MARK_CACHE_ERROR_CODE.toString)) {
throw new FSNotInitException()
- }
- case _ =>
}
- throw new StorageErrorException(52005, msg)
+ throw new StorageErrorException(IOFileClientConf.IO_EXECUTE_FAILED_CODE, msg)
case o =>
val msg = s"IO_FILE job : ${jobReq.getId} failed to execute code : ${methodEntity}, return a unknown response : ${BDPJettyServerHelper.gson.toJson(o)}"
warn(msg)
- throw new StorageErrorException(52006, msg)
+ throw new StorageErrorException(IOFileClientConf.IO_EXECUTE_UNKNOWN_REASON_CODE, msg)
}
val executeTime = System.currentTimeMillis()
info(s"${jobReq.getId} execute method ${methodEntity.methodName}, orchestratorTime(${orchestrationTime - startTime}ms) execute time(${executeTime - orchestrationTime}ms)")
diff --git a/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/conf/IOFileClientConf.scala b/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/conf/IOFileClientConf.scala
index c473f94..5c48a35 100644
--- a/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/conf/IOFileClientConf.scala
+++ b/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/conf/IOFileClientConf.scala
@@ -28,4 +28,10 @@
val IO_LOADBALANCE_CAPACITY = CommonVars("wds.linkis.io.loadbalance.capacity", "1")
+ val IO_EXTRA_LABELS = CommonVars("wds.linkis.io.extra.labels", "")
+
+ val IO_EXECUTE_FAILED_CODE = 52005
+
+ val IO_EXECUTE_UNKNOWN_REASON_CODE = 52006
+
}
diff --git a/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/orchestrator/IOComputationOrchestratorSessionFactory.scala b/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/orchestrator/IOComputationOrchestratorSessionFactory.scala
new file mode 100644
index 0000000..b12e47b
--- /dev/null
+++ b/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/orchestrator/IOComputationOrchestratorSessionFactory.scala
@@ -0,0 +1,157 @@
+/*
+ * Copyright 2019 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.linkis.storage.io.orchestrator
+
+import java.util
+
+import com.webank.wedatasphere.linkis.common.utils.Logging
+import com.webank.wedatasphere.linkis.orchestrator.computation.ComputationOrchestratorSessionFactory
+import com.webank.wedatasphere.linkis.orchestrator.computation.catalyst.converter.CodeConverterTransform
+import com.webank.wedatasphere.linkis.orchestrator.computation.catalyst.converter.ruler.JobReqParamCheckRuler
+import com.webank.wedatasphere.linkis.orchestrator.computation.catalyst.parser.DefaultCodeJobParserTransform
+import com.webank.wedatasphere.linkis.orchestrator.computation.catalyst.physical.{CacheExecTaskTransform, CodeExecTaskTransform, JobExecTaskTransform, StageExecTaskTransform}
+import com.webank.wedatasphere.linkis.orchestrator.computation.catalyst.reheater.PruneTaskRetryTransform
+import com.webank.wedatasphere.linkis.orchestrator.computation.catalyst.validator.DefaultLabelRegularCheckRuler
+import com.webank.wedatasphere.linkis.orchestrator.core.OrchestratorSessionBuilder
+import com.webank.wedatasphere.linkis.orchestrator.extensions.CatalystExtensions.CatalystExtensionsBuilder
+import com.webank.wedatasphere.linkis.orchestrator.extensions.CheckRulerExtensions.CheckRulerExtensionsBuilder
+import com.webank.wedatasphere.linkis.orchestrator.extensions.OperationExtensions.OperationExtensionsBuilder
+import com.webank.wedatasphere.linkis.orchestrator.extensions.catalyst.CheckRuler.{ConverterCheckRulerBuilder, ValidatorCheckRulerBuilder}
+import com.webank.wedatasphere.linkis.orchestrator.extensions.catalyst.Transform._
+import com.webank.wedatasphere.linkis.orchestrator.extensions.catalyst._
+import com.webank.wedatasphere.linkis.orchestrator.extensions.operation.CancelOperationBuilder
+import com.webank.wedatasphere.linkis.orchestrator.extensions.{CatalystExtensions, CheckRulerExtensions, OperationExtensions}
+import com.webank.wedatasphere.linkis.orchestrator.{Orchestrator, OrchestratorSession}
+
+class IOComputationOrchestratorSessionFactory extends ComputationOrchestratorSessionFactory with Logging{
+ private val codeConverterTransformBuilder = new ConverterTransformBuilder() {
+ override def apply(v1: OrchestratorSession): ConverterTransform = new CodeConverterTransform
+ }
+
+
+ private val codeStageParserTransformBuilder = new ParserTransformBuilder() {
+ override def apply(v1: OrchestratorSession): ParserTransform = new DefaultCodeJobParserTransform
+ }
+
+ //planner builder definition
+ private val taskPlannerTransformBuilder = new PlannerTransformBuilder() {
+ override def apply(v1: OrchestratorSession): PlannerTransform = new IOTaskPlannerTransform
+ }
+
+
+ private val jobExecTaskTransformBuilder = new PhysicalTransformBuilder() {
+ override def apply(v1: OrchestratorSession): PhysicalTransform = new JobExecTaskTransform()
+ }
+
+ private val stageExecTaskTransformBuilder = new PhysicalTransformBuilder() {
+ override def apply(v1: OrchestratorSession): PhysicalTransform = new StageExecTaskTransform()
+ }
+
+ private val cacheExecTaskTransformBuilder = new PhysicalTransformBuilder() {
+ override def apply(v1: OrchestratorSession): PhysicalTransform = new CacheExecTaskTransform()
+ }
+
+ private val codeExecTaskTransformBuilder = new PhysicalTransformBuilder() {
+ override def apply(v1: OrchestratorSession): PhysicalTransform = new CodeExecTaskTransform()
+ }
+
+ private val PruneTaskRetryTransformBuilder = new ReheaterTransformBuilder() {
+ override def apply(v1: OrchestratorSession): ReheaterTransform = new PruneTaskRetryTransform()
+ }
+
+ private val catalystExtensionsBuilder: CatalystExtensionsBuilder = new CatalystExtensionsBuilder(){
+ override def apply(v1: CatalystExtensions): Unit = {
+
+ v1.injectConverterTransform(codeConverterTransformBuilder)
+
+
+ v1.injectParserTransform(codeStageParserTransformBuilder)
+
+ v1.injectPlannerTransform(taskPlannerTransformBuilder)
+
+ v1.injectPhysicalTransform(jobExecTaskTransformBuilder)
+ v1.injectPhysicalTransform(stageExecTaskTransformBuilder)
+ v1.injectPhysicalTransform(cacheExecTaskTransformBuilder)
+ v1.injectPhysicalTransform(codeExecTaskTransformBuilder)
+
+ v1.injectReheaterTransform(PruneTaskRetryTransformBuilder)
+ }
+ }
+
+ //convertCheckRuler
+ private val jobReqCheckRulerBuilder = new ConverterCheckRulerBuilder(){
+ override def apply(v1: OrchestratorSession): ConverterCheckRuler = {
+ new JobReqParamCheckRuler
+ }
+ }
+
+
+ private val labelRegularCheckRulerBuilder = new ValidatorCheckRulerBuilder(){
+ override def apply(v1: OrchestratorSession): ValidatorCheckRuler = new DefaultLabelRegularCheckRuler
+ }
+
+ private val checkRulerExtensionsBuilder: CheckRulerExtensionsBuilder = new CheckRulerExtensionsBuilder(){
+ override def apply(v1: CheckRulerExtensions): Unit = {
+
+ v1.injectConverterCheckRuler(jobReqCheckRulerBuilder)
+ v1.injectValidatorCheckRuler(labelRegularCheckRulerBuilder)
+
+ }
+ }
+
+ //Operation
+ private val cancelOperationBuilder = new CancelOperationBuilder
+
+
+ private val operationExtensionsBuilder: OperationExtensionsBuilder = new OperationExtensionsBuilder(){
+ override def apply(v1: OperationExtensions): Unit = {
+ v1.injectOperation(cancelOperationBuilder)
+ }
+ }
+
+ private val orchestrator: Orchestrator = Orchestrator.getOrchestrator
+ orchestrator.initialize()
+
+ private val orchestratorSessionMap: util.Map[String, OrchestratorSessionBuilder] = new util.HashMap[String, OrchestratorSessionBuilder]()
+
+ override def getOrCreateSession(id: String): OrchestratorSession = {
+ if (! orchestratorSessionMap.containsKey(id)) synchronized {
+ if (! orchestratorSessionMap.containsKey(id)){
+ orchestratorSessionMap.put(id, createSessionBuilder(id))
+ }
+ }
+ orchestratorSessionMap.get(id).getOrCreate()
+ }
+
+ override def getOrCreateSession(orchestratorSessionBuilder: OrchestratorSessionBuilder): OrchestratorSession = {
+ val id = orchestratorSessionBuilder.getId()
+ if (! orchestratorSessionMap.containsKey(id)) synchronized {
+ if (! orchestratorSessionMap.containsKey(id)){
+ orchestratorSessionMap.put(id, orchestratorSessionBuilder)
+ }
+ }
+ orchestratorSessionMap.get(id).getOrCreate()
+ }
+
+ override def createSessionBuilder(id: String): OrchestratorSessionBuilder = {
+ val builder = orchestrator.createOrchestratorSessionBuilder()
+ builder.setId(id)
+ builder.withCatalystExtensions(catalystExtensionsBuilder)
+ builder.withCheckRulerExtensions(checkRulerExtensionsBuilder)
+ builder.withOperationExtensions(operationExtensionsBuilder)
+ builder
+ }
+
+ override def getOrchestrator(): Orchestrator = this.orchestrator
+}
diff --git a/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/orchestrator/IOFileOrchestratorFactory.scala b/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/orchestrator/IOFileOrchestratorFactory.scala
index 2b6e0ea..c82d5fa 100644
--- a/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/orchestrator/IOFileOrchestratorFactory.scala
+++ b/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/orchestrator/IOFileOrchestratorFactory.scala
@@ -17,12 +17,23 @@
package com.webank.wedatasphere.linkis.storage.io.orchestrator
import com.webank.wedatasphere.linkis.orchestrator.OrchestratorSession
-import com.webank.wedatasphere.linkis.orchestrator.computation.ComputationOrchestratorSessionFactory
+import com.webank.wedatasphere.linkis.orchestrator.core.AbstractOrchestratorContext
+
object IOFileOrchestratorFactory {
- private lazy val orchestratorSession: OrchestratorSession = ComputationOrchestratorSessionFactory.getOrCreateExecutionFactory()
- .getOrCreateSession("io_file_cilent")
+ private val orchestratorSessionFactory = new IOComputationOrchestratorSessionFactory
+
+ private lazy val orchestratorSession: OrchestratorSession = {
+ val session = orchestratorSessionFactory.getOrCreateSession("io_file_cilent")
+
+ session.orchestrator.getOrchestratorContext match {
+ case orchestratorContext: AbstractOrchestratorContext =>
+ orchestratorContext.addGlobalPlugin(new IOUserParallelOrchestratorPlugin)
+ case _ =>
+ }
+ session
+ }
def getOrchestratorSession(): OrchestratorSession = orchestratorSession
diff --git a/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/orchestrator/IOTaskPlannerTransform.scala b/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/orchestrator/IOTaskPlannerTransform.scala
new file mode 100644
index 0000000..d9e0727
--- /dev/null
+++ b/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/orchestrator/IOTaskPlannerTransform.scala
@@ -0,0 +1,54 @@
+/*
+ * Copyright 2019 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.linkis.storage.io.orchestrator
+
+import com.webank.wedatasphere.linkis.common.utils.Logging
+import com.webank.wedatasphere.linkis.orchestrator.code.plans.ast.CodeJob
+import com.webank.wedatasphere.linkis.orchestrator.code.plans.logical.{CodeLogicalUnitTask, CodeLogicalUnitTaskDesc}
+import com.webank.wedatasphere.linkis.orchestrator.extensions.catalyst.PlannerTransform
+import com.webank.wedatasphere.linkis.orchestrator.plans.ast.{ASTContext, Job}
+import com.webank.wedatasphere.linkis.orchestrator.plans.logical.{EndJobTaskDesc, JobTask, Task}
+import com.webank.wedatasphere.linkis.orchestrator.plans.unit.CodeLogicalUnit
+
+class IOTaskPlannerTransform extends PlannerTransform with Logging{
+
+ override def apply(in: Job, context: ASTContext): Task = {
+ in match {
+ case job: CodeJob =>
+ val taskDesc = EndJobTaskDesc(job)
+ val jobTaskTmp = new JobTask(Array(), Array(buildCodeLogicTaskTree(job.getCodeLogicalUnit, job)))
+ jobTaskTmp.setTaskDesc(taskDesc)
+ rebuildTreeNode(jobTaskTmp)
+ case _ => error(s"unknown job type:${in.getClass} ")
+ null
+ }
+ }
+
+ def rebuildTreeNode(tmpTask: Task): Task = {
+ tmpTask.getChildren.foreach(_.withNewParents(Array(tmpTask)))
+ tmpTask
+ }
+
+ def buildCodeLogicTaskTree(codeLogicalUnit: CodeLogicalUnit, job: Job): Task = {
+ val codeLogicalUnitTaskTmp = new CodeLogicalUnitTask(Array(), Array())
+ codeLogicalUnitTaskTmp.setTaskDesc(CodeLogicalUnitTaskDesc(job))
+ if(codeLogicalUnit != null) codeLogicalUnitTaskTmp.setCodeLogicalUnit(codeLogicalUnit)
+ //rebuildTreeNode(codeLogicalUnitTaskTmp)
+ codeLogicalUnitTaskTmp
+ }
+
+
+
+ override def getName: String = "IOTaskPlannerTransform"
+}
diff --git a/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/orchestrator/IOUserParallelOrchestratorPlugin.scala b/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/orchestrator/IOUserParallelOrchestratorPlugin.scala
new file mode 100644
index 0000000..224bc2c
--- /dev/null
+++ b/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/orchestrator/IOUserParallelOrchestratorPlugin.scala
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2019 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.linkis.storage.io.orchestrator
+
+import java.util
+
+import com.webank.wedatasphere.linkis.manager.label.entity.Label
+import com.webank.wedatasphere.linkis.orchestrator.conf.OrchestratorConfiguration
+import com.webank.wedatasphere.linkis.orchestrator.plugin.UserParallelOrchestratorPlugin
+
+class IOUserParallelOrchestratorPlugin extends UserParallelOrchestratorPlugin{
+
+ private val MAX_RUNNER_TASK_SIZE = OrchestratorConfiguration.TASK_RUNNER_MAX_SIZE.getValue
+
+ override def getUserMaxRunningJobs(user: String, labels: util.List[Label[_]]): Int = {
+ MAX_RUNNER_TASK_SIZE
+ }
+
+ override def isReady: Boolean = true
+
+ override def start(): Unit = {}
+
+ override def close(): Unit = {
+ }
+}
diff --git a/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/utils/IOClientUtils.scala b/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/utils/IOClientUtils.scala
index 04835ab..78bd058 100644
--- a/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/utils/IOClientUtils.scala
+++ b/linkis-extensions/linkis-io-file-client/src/main/scala/com/webank/wedatasphere/linkis/storage/io/utils/IOClientUtils.scala
@@ -28,10 +28,13 @@
import com.webank.wedatasphere.linkis.orchestrator.domain.JobReq
import com.webank.wedatasphere.linkis.orchestrator.plans.unit.CodeLogicalUnit
import com.webank.wedatasphere.linkis.protocol.utils.TaskUtils
+import com.webank.wedatasphere.linkis.server.BDPJettyServerHelper
import com.webank.wedatasphere.linkis.storage.domain.{MethodEntity, MethodEntitySerializer}
import com.webank.wedatasphere.linkis.storage.io.conf.IOFileClientConf
import com.webank.wedatasphere.linkis.storage.utils.StorageConfiguration.IO_USER
import com.webank.wedatasphere.linkis.storage.utils.{StorageConfiguration, StorageUtils}
+import org.apache.commons.lang.StringUtils
+import scala.collection.JavaConverters._
object IOClientUtils {
@@ -59,7 +62,7 @@
private val conCurrentLabel = {
val label = labelBuilderFactory.createLabel(classOf[ConcurrentEngineConnLabel])
- label.setParallelism(10)
+ label.setParallelism("10")
label
}
@@ -70,7 +73,7 @@
}
def generateJobGrupID(): String = {
- "io_jobGrup_" + jobGroupIDGenerator.getAndIncrement()
+ "io_jobGrup_" + jobGroupIDGenerator.getAndIncrement()
}
@@ -80,6 +83,16 @@
loadBalanceLabel
}
+ def getExtraLabels(): Array[Label[_]] = {
+ val labelJson = IOFileClientConf.IO_EXTRA_LABELS.getValue
+ if (StringUtils.isNotBlank(labelJson)) {
+ val labelMap = BDPJettyServerHelper.gson.fromJson(labelJson, classOf[java.util.Map[String, Object]])
+ labelBuilderFactory.getLabels(labelMap).asScala.toArray
+ } else {
+ Array.empty[Label[_]]
+ }
+ }
+
def addLabelToParams(label: Label[_], params: util.Map[String, Any]): Unit = {
val labelMap = TaskUtils.getLabelsMap(params)
labelMap.put(label.getLabelKey, label.getStringValue)
@@ -93,7 +106,7 @@
labels.add(codeTypeLabel)
labels.add(conCurrentLabel)
- val rootUser = if(methodEntity.fsType == StorageUtils.HDFS) StorageConfiguration.HDFS_ROOT_USER.getValue else StorageConfiguration.LOCAL_ROOT_USER.getValue
+ val rootUser = if (methodEntity.fsType == StorageUtils.HDFS) StorageConfiguration.HDFS_ROOT_USER.getValue else StorageConfiguration.LOCAL_ROOT_USER.getValue
val userCreatorLabel = labelBuilderFactory.createLabel(classOf[UserCreatorLabel])
userCreatorLabel.setCreator(creator)
userCreatorLabel.setUser(rootUser)
diff --git a/linkis-extensions/pom.xml b/linkis-extensions/pom.xml
index 80003a2..9fca788 100644
--- a/linkis-extensions/pom.xml
+++ b/linkis-extensions/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
@@ -28,7 +28,6 @@
<modules>
<module>linkis-io-file-client</module>
- <module>spark-excel</module>
</modules>
</project>
\ No newline at end of file
diff --git a/linkis-extensions/spark-excel/pom.xml b/linkis-extensions/spark-excel/pom.xml
deleted file mode 100644
index 19dcb86..0000000
--- a/linkis-extensions/spark-excel/pom.xml
+++ /dev/null
@@ -1,193 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
- ~ Copyright 2019 WeBank
- ~ Licensed under the Apache License, Version 2.0 (the "License");
- ~ you may not use this file except in compliance with the License.
- ~ You may obtain a copy of the License at
- ~ http://www.apache.org/licenses/LICENSE-2.0
- ~ Unless required by applicable law or agreed to in writing, software
- ~ distributed under the License is distributed on an "AS IS" BASIS,
- ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- ~ See the License for the specific language governing permissions and
- ~ limitations under the License.
- -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
-
- <groupId>com.webank.wedatasphere</groupId>
- <artifactId>spark-excel_2.11</artifactId>
- <version>1.0.0</version>
-
- <properties>
- <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
- <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
- <java.version>1.7</java.version>
- <maven.version>3.3.3</maven.version>
- <scala.version>2.11.8</scala.version>
- <scala.compat.version>2.11</scala.compat.version>
- <spark.version>2.4.3</spark.version>
- <hadoop.version>2.7.2</hadoop.version>
- <kafka.version>0.8.2.0</kafka.version>
- <slf4j.version>1.7.12</slf4j.version>
- <hbase.version>1.2.2</hbase.version>
- <curator.version>2.7.0</curator.version>
- <optparse.version>1.1.2</optparse.version>
- <log4j.version>1.2.17</log4j.version>
- <json4s.version>3.2.11</json4s.version>
- <guava.version>28.2-jre</guava.version>
- <hadoop-scope>provided</hadoop-scope>
- <spark-scope>provided</spark-scope>
- </properties>
-
- <dependencies>
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-log4j12</artifactId>
- <version>${slf4j.version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.poi</groupId>
- <artifactId>poi-ooxml</artifactId>
- <version>3.17</version>
- </dependency>
- <dependency>
- <groupId>com.norbitltd</groupId>
- <artifactId>spoiwo_${scala.compat.version}</artifactId>
- <version>1.2.0</version>
- </dependency>
- <dependency>
- <groupId>com.monitorjbl</groupId>
- <artifactId>xlsx-streamer</artifactId>
- <version>1.2.1</version>
- <exclusions>
- <exclusion>
- <groupId>org.apache.poi</groupId>
- <artifactId>ooxml-schemas</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>com.fasterxml.jackson.module</groupId>
- <artifactId>jackson-module-scala_${scala.compat.version}</artifactId>
- <version>2.8.8</version>
- <scope>${spark-scope}</scope>
- </dependency>
- <dependency>
- <groupId>org.scalatest</groupId>
- <artifactId>scalatest_${scala.compat.version}</artifactId>
- <version>3.0.1</version>
- <scope>${spark-scope}</scope>
- </dependency>
- <dependency>
- <groupId>org.scalacheck</groupId>
- <artifactId>scalacheck_${scala.compat.version}</artifactId>
- <version>1.13.4</version>
- <scope>${spark-scope}</scope>
- </dependency>
- <dependency>
- <groupId>com.github.alexarchambault</groupId>
- <artifactId>scalacheck-shapeless_1.13_${scala.compat.version}</artifactId>
- <version>1.1.6</version>
- <scope>${spark-scope}</scope>
- </dependency>
- <dependency>
- <groupId>com.holdenkarau</groupId>
- <artifactId>spark-testing-base_${scala.compat.version}</artifactId>
- <version>2.4.3_0.12.0</version>
- <scope>${spark-scope}</scope>
- </dependency>
- <dependency>
- <groupId>org.scalamock</groupId>
- <artifactId>scalamock-scalatest-support_${scala.compat.version}</artifactId>
- <version>3.5.0</version>
- <scope>${spark-scope}</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-core_${scala.compat.version}</artifactId>
- <version>${spark.version}</version>
- <scope>${spark-scope}</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.spark</groupId>
- <artifactId>spark-sql_${scala.compat.version}</artifactId>
- <version>${spark.version}</version>
- <scope>${spark-scope}</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-hdfs</artifactId>
- <version>${hadoop.version}</version>
- <scope>${spark-scope}</scope>
- <exclusions>
- <exclusion>
- <artifactId>servlet-api</artifactId>
- <groupId>javax.servlet</groupId>
- </exclusion>
- <exclusion>
- <groupId>com.google.guava</groupId>
- <artifactId>guava</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
-
- </dependencies>
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-deploy-plugin</artifactId>
- <version>2.8.2</version>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-compiler-plugin</artifactId>
- <version>3.2</version>
- <configuration>
- <source>${java.version}</source>
- <target>${java.version}</target>
- <encoding>UTF-8</encoding>
- </configuration>
- </plugin>
- <plugin>
- <groupId>net.alchim31.maven</groupId>
- <artifactId>scala-maven-plugin</artifactId>
- <version>3.2.2</version>
- <configuration>
- <recompileMode>incremental</recompileMode>
- <args>
- <arg>-target:jvm-${java.version}</arg>
- </args>
- <javacArgs>
- <javacArg>-source</javacArg>
- <javacArg>${java.version}</javacArg>
- <javacArg>-target</javacArg>
- <javacArg>${java.version}</javacArg>
- </javacArgs>
- </configuration>
- <executions>
- <execution>
- <phase>process-resources</phase>
- <goals>
- <goal>compile</goal>
- </goals>
- </execution>
- <execution>
- <id>scala-test-compile</id>
- <phase>process-test-resources</phase>
- <goals>
- <goal>testCompile</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
-
- </plugins>
- </build>
-</project>
\ No newline at end of file
diff --git a/linkis-extensions/spark-excel/src/main/scala/com/webank/wedatasphere/spark/excel/DefaultSource.scala b/linkis-extensions/spark-excel/src/main/scala/com/webank/wedatasphere/spark/excel/DefaultSource.scala
deleted file mode 100644
index 1b42aaa..0000000
--- a/linkis-extensions/spark-excel/src/main/scala/com/webank/wedatasphere/spark/excel/DefaultSource.scala
+++ /dev/null
@@ -1,119 +0,0 @@
-/*
- * Copyright 2019 WeBank
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.webank.wedatasphere.spark.excel
-
-import org.apache.hadoop.fs.Path
-import org.apache.spark.sql.sources._
-import org.apache.spark.sql.types.StructType
-import org.apache.spark.sql.{DataFrame, SQLContext, SaveMode}
-
-class DefaultSource extends RelationProvider with SchemaRelationProvider with CreatableRelationProvider {
-
- /**
- * Creates a new relation for retrieving data from an Excel file
- */
- override def createRelation(sqlContext: SQLContext, parameters: Map[String, String]): ExcelRelation =
- createRelation(sqlContext, parameters, null)
-
- /**
- * Creates a new relation for retrieving data from an Excel file
- */
- override def createRelation(
- sqlContext: SQLContext,
- parameters: Map[String, String],
- schema: StructType
- ): ExcelRelation = {
- ExcelRelation(
- location = checkParameter(parameters, "path"),
- sheetName = parameters.get("sheetName"),
- useHeader = checkParameter(parameters, "useHeader").toBoolean,
- treatEmptyValuesAsNulls = parameters.get("treatEmptyValuesAsNulls").fold(true)(_.toBoolean),
- userSchema = Option(schema),
- inferSheetSchema = parameters.get("inferSchema").fold(false)(_.toBoolean),
- addColorColumns = parameters.get("addColorColumns").fold(false)(_.toBoolean),
- startColumn = parameters.get("startColumn").fold(0)(_.toInt),
- endColumn = parameters.get("endColumn").fold(Int.MaxValue)(_.toInt),
- timestampFormat = parameters.get("timestampFormat"),
- maxRowsInMemory = parameters.get("maxRowsInMemory").map(_.toInt),
- excerptSize = parameters.get("excerptSize").fold(10)(_.toInt),
- parameters = parameters,
- dateFormat = parameters.get("dateFormats").getOrElse("yyyy-MM-dd").split(";").toList,
- indexes = parameters.getOrElse("indexes","-1").split(",").map(_.toInt)
- )(sqlContext)
- }
-
- override def createRelation(
- sqlContext: SQLContext,
- mode: SaveMode,
- parameters: Map[String, String],
- data: DataFrame
- ): BaseRelation = {
- val path = checkParameter(parameters, "path")
- val sheetName = parameters.getOrElse("sheetName", "Sheet1")
- val useHeader = checkParameter(parameters, "useHeader").toBoolean
- val dateFormat = parameters.getOrElse("dateFormat", ExcelFileSaver.DEFAULT_DATE_FORMAT)
- val timestampFormat = parameters.getOrElse("timestampFormat", ExcelFileSaver.DEFAULT_TIMESTAMP_FORMAT)
- val exportNullValue = parameters.getOrElse("exportNullValue","SHUFFLEOFF") match {
- case "BLANK" =>""
- case s:String =>s
- }
- val filesystemPath = new Path(path)
- val fs = filesystemPath.getFileSystem(sqlContext.sparkContext.hadoopConfiguration)
- fs.setWriteChecksum(false)
- val doSave = if (fs.exists(filesystemPath)) {
- mode match {
- case SaveMode.Append =>
- sys.error(s"Append mode is not supported by ${this.getClass.getCanonicalName}")
- case SaveMode.Overwrite =>
- fs.delete(filesystemPath, true)
- true
- case SaveMode.ErrorIfExists =>
- sys.error(s"path $path already exists.")
- case SaveMode.Ignore => false
- }
- } else {
- true
- }
- if (doSave) {
- // Only save data when the save mode is not ignore.
- (new ExcelFileSaver(fs)).save(
- filesystemPath,
- data,
- sheetName = sheetName,
- useHeader = useHeader,
- dateFormat = dateFormat,
- timestampFormat = timestampFormat,
- exportNullValue = exportNullValue
- )
- }
-
- createRelation(sqlContext, parameters, data.schema)
- }
-
- // Forces a Parameter to exist, otherwise an exception is thrown.
- private def checkParameter(map: Map[String, String], param: String): String = {
- if (!map.contains(param)) {
- throw new IllegalArgumentException(s"Parameter ${'"'}$param${'"'} is missing in options.")
- } else {
- map.apply(param)
- }
- }
-
- // Gets the Parameter if it exists, otherwise returns the default argument
- private def parameterOrDefault(map: Map[String, String], param: String, default: String) =
- map.getOrElse(param, default)
-}
diff --git a/linkis-extensions/spark-excel/src/main/scala/com/webank/wedatasphere/spark/excel/DefaultSource15.scala b/linkis-extensions/spark-excel/src/main/scala/com/webank/wedatasphere/spark/excel/DefaultSource15.scala
deleted file mode 100644
index ebe1849..0000000
--- a/linkis-extensions/spark-excel/src/main/scala/com/webank/wedatasphere/spark/excel/DefaultSource15.scala
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * Copyright 2019 WeBank
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.webank.wedatasphere.spark.excel
-
-import org.apache.spark.sql.sources.DataSourceRegister
-
-class DefaultSource15 extends DefaultSource with DataSourceRegister {
- override def shortName(): String = "excel"
-}
diff --git a/linkis-extensions/spark-excel/src/main/scala/com/webank/wedatasphere/spark/excel/ExcelFileSaver.scala b/linkis-extensions/spark-excel/src/main/scala/com/webank/wedatasphere/spark/excel/ExcelFileSaver.scala
deleted file mode 100644
index 5736204..0000000
--- a/linkis-extensions/spark-excel/src/main/scala/com/webank/wedatasphere/spark/excel/ExcelFileSaver.scala
+++ /dev/null
@@ -1,180 +0,0 @@
-/*
- * Copyright 2019 WeBank
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.webank.wedatasphere.spark.excel
-
-import java.io.{BufferedOutputStream, OutputStream}
-import java.text.SimpleDateFormat
-import java.util.Date
-
-import org.apache.hadoop.fs.{FileSystem, Path}
-import org.apache.poi.ss.usermodel.{IndexedColors, _}
-import org.apache.poi.xssf.streaming.SXSSFWorkbook
-import org.apache.spark.sql.DataFrame
-
-import scala.language.postfixOps
-
-object ExcelFileSaver {
- final val DEFAULT_SHEET_NAME = "Sheet1"
- final val DEFAULT_DATE_FORMAT = "yyyy-MM-dd"
- final val DEFAULT_TIMESTAMP_FORMAT = "yyyy-mm-dd hh:mm:ss.000"
- final val DEFAULT_EXPORT_NULL_VALUE = "SHUFFLEOFF"
-}
-
-class ExcelFileSaver(fs: FileSystem) {
-
- import ExcelFileSaver._
-
- def save(
- location: Path,
- dataFrame: DataFrame,
- sheetName: String = DEFAULT_SHEET_NAME,
- useHeader: Boolean = true,
- dateFormat: String = DEFAULT_DATE_FORMAT,
- timestampFormat: String = DEFAULT_TIMESTAMP_FORMAT,
- exportNullValue:String = DEFAULT_EXPORT_NULL_VALUE
- ): Unit = {
- fs.setVerifyChecksum(false)
- val headerRow = dataFrame.schema.map(_.name)
- val dataRows = dataFrame.toLocalIterator()
- val excelWriter = new ExcelWriter(sheetName, dateFormat, timestampFormat)
- if (useHeader) excelWriter.writeHead(headerRow)
- while (dataRows.hasNext) {
- val line = dataRows.next().toSeq
- excelWriter.writerRow(line,exportNullValue)
- }
- // if failed try to refresh nfs cache
- val out: OutputStream =
- try{
- fs.create(location)
- } catch {
- case e:Throwable =>
- fs.listFiles(location.getParent, false)
- fs.create(location)
- case _ =>
- fs.listFiles(location.getParent, false)
- fs.create(location)
- }
- excelWriter.close(new BufferedOutputStream(out))
- }
-
- def autoClose[A <: AutoCloseable, B](closeable: A)(fun: (A) => B): B = {
- try {
- fun(closeable)
- } finally {
- closeable.close()
- }
- }
-
-
-}
-
-class ExcelWriter(sheetName: String, dateFormat: String, timestampFormat: String) {
-
-
- val wb = new SXSSFWorkbook(100)
- val sheet = wb.createSheet(sheetName)
- val df = new SimpleDateFormat(dateFormat)
- val tf = new SimpleDateFormat(timestampFormat)
- val createHelper = wb.getCreationHelper
- val sdf = wb.createCellStyle()
- sdf.setDataFormat(createHelper.createDataFormat().getFormat(dateFormat))
- val stf = wb.createCellStyle()
- stf.setDataFormat(createHelper.createDataFormat().getFormat(timestampFormat))
-
- var rowNum = 0
- var columnsLen = 0
-
- def writeHead(headRow: Seq[String]): Unit = {
- columnsLen = headRow.length
- //设置header的格式
- val headerFont = wb.createFont()
- headerFont.setBold(true)
- headerFont.setFontHeightInPoints(14)
- headerFont.setColor(IndexedColors.RED.getIndex())
- val headerCellStyle = wb.createCellStyle()
- headerCellStyle.setFont(headerFont)
- val row = sheet.createRow(rowNum)
- for (i <- headRow.indices) {
- createCell(row, i, headRow(i), headerCellStyle)
- }
- rowNum = rowNum + 1
- }
-
- def writerRow(line: Seq[Any],exportNullValue:String): Unit = {
- val row = sheet.createRow(rowNum)
- for (i <- line.indices) {
- createCell(row, i, line(i),exportNullValue)
- }
- rowNum = rowNum + 1
- }
-
- def createCell(row: Row, col: Int, value: String, cellStyle: CellStyle): Unit = {
- val cell = row.createCell(col)
- cell.setCellValue(value)
- cell.setCellStyle(cellStyle)
- }
-
- def setDateValue(cell:Cell, date: Date): Unit ={
- cell.setCellStyle(sdf)
- cell.setCellValue(date)
- }
- def setTimestampValue(cell:Cell, date: Date): Unit ={
- cell.setCellStyle(stf)
- cell.setCellValue(date)
- }
-
- def createCell(row: Row, col: Int, value: Any,exportNullValue:String): Unit = {
- val cell = row.createCell(col)
- value match {
- case t: java.sql.Timestamp => setTimestampValue(cell, new Date(t.getTime))
- case d: java.sql.Date => setDateValue(cell, new Date(d.getTime))
- case s: String => {
- if(("NULL".equals(s) || "".equals(s)) && !"SHUFFLEOFF".equals(exportNullValue))
- cell.setCellValue(exportNullValue)
- else
- cell.setCellValue(s)
- }
- case f: Float => cell.setCellValue(f)
- case d: Double => cell.setCellValue(d)
- case b: Boolean => cell.setCellValue(b)
- case b: Byte => cell.setCellValue(b)
- case s: Short => cell.setCellValue(s)
- case i: Int => cell.setCellValue(i)
- case l: Long => cell.setCellValue(l)
- case b: BigDecimal => cell.setCellValue(b.doubleValue())
- case b: java.math.BigDecimal => cell.setCellValue(b.doubleValue())
- case null => if("SHUFFLEOFF".equals(exportNullValue)) cell.setCellValue("NULL") else cell.setCellValue(exportNullValue)
- case _ => cell.setCellValue(value.toString)
- }
- }
-
- def close(outputStream: OutputStream): Unit = {
- try {
- sheet.trackAllColumnsForAutoSizing()
- for (i <- 0 until columnsLen) {
- sheet.autoSizeColumn(i)
- }
- wb.write(outputStream);
- } catch {
- case e: Throwable =>
- throw e
- } finally {
- outputStream.close()
- wb.close()
- }
- }
-}
diff --git a/linkis-extensions/spark-excel/src/main/scala/com/webank/wedatasphere/spark/excel/ExcelRelation.scala b/linkis-extensions/spark-excel/src/main/scala/com/webank/wedatasphere/spark/excel/ExcelRelation.scala
deleted file mode 100644
index 445bfaf..0000000
--- a/linkis-extensions/spark-excel/src/main/scala/com/webank/wedatasphere/spark/excel/ExcelRelation.scala
+++ /dev/null
@@ -1,407 +0,0 @@
-/*
- * Copyright 2019 WeBank
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.webank.wedatasphere.spark.excel
-
-import java.math.BigDecimal
-import java.sql.Timestamp
-import java.text.SimpleDateFormat
-import java.util.Locale
-
-import com.monitorjbl.xlsx.StreamingReader
-import org.apache.commons.lang.StringUtils
-import org.apache.hadoop.fs.{FileSystem, Path}
-import org.apache.poi.ss.usermodel.{Cell, CellType, DataFormatter, DateUtil, Sheet, Workbook, WorkbookFactory, Row => SheetRow}
-import org.apache.spark.rdd.RDD
-import org.apache.spark.sql._
-import org.apache.spark.sql.sources._
-import org.apache.spark.sql.types._
-
-import scala.collection.JavaConverters._
-import scala.util.{Failure, Success, Try}
-
-case class ExcelRelation(
- location: String,
- sheetName: Option[String],
- useHeader: Boolean,
- treatEmptyValuesAsNulls: Boolean,
- inferSheetSchema: Boolean,
- addColorColumns: Boolean = true,
- userSchema: Option[StructType] = None,
- startColumn: Int = 0,
- endColumn: Int = Int.MaxValue,
- timestampFormat: Option[String] = None,
- maxRowsInMemory: Option[Int] = None,
- excerptSize: Int = 10,
- parameters: Map[String, String] = null,dateFormat: List[String],indexes:Array[Int]
-)(@transient val sqlContext: SQLContext)
- extends BaseRelation
- with TableScan
- with PrunedScan{
-
- private val path = new Path(location)
-
- def extractCells(row: org.apache.poi.ss.usermodel.Row): Vector[Option[Cell]] =
- row.eachCellIterator(startColumn, endColumn).to[Vector]
-
- private def openWorkbook(): Workbook = {
-
- val fs = FileSystem.get(path.toUri, sqlContext.sparkContext.hadoopConfiguration)
- // if failed try to refresh nfs cache
- val inputStream =
- try{
- fs.open(path)
- } catch {
- case e:Throwable =>
- fs.listFiles(path.getParent, false)
- fs.open(path)
- case _ =>
- fs.listFiles(path.getParent, false)
- fs.open(path)
- }
-
- maxRowsInMemory
- .map { maxRowsInMem =>
- StreamingReader
- .builder()
- .rowCacheSize(maxRowsInMem)
- .bufferSize(maxRowsInMem * 1024)
- .open(inputStream)
- }
- .getOrElse(WorkbookFactory.create(inputStream))
- }
-
- /**
- * 抽取10row 用于自动推断
- * @return
- */
- private def getExcerpt(): (SheetRow, List[SheetRow]) = {
- val workbook = openWorkbook()
- val sheet = findSheet(workbook, sheetName)
- val sheetIterator = sheet.iterator.asScala
- var currentRow: org.apache.poi.ss.usermodel.Row = null
- while (sheetIterator.hasNext && currentRow == null) {
- currentRow = sheetIterator.next
- }
- if (currentRow == null) {
- workbook.close()
- throw new RuntimeException(s"Sheet $sheetName in $path doesn't seem to contain any data")
- }
- val firstRow = currentRow
- var counter = 0
- val excerpt = new Array[org.apache.poi.ss.usermodel.Row](excerptSize)
- while (sheetIterator.hasNext && counter < excerptSize) {
- excerpt(counter) = sheetIterator.next
- counter += 1
- }
- workbook.close()
- (firstRow, excerpt.take(counter).to[List])
- }
-
- /**
- * 删掉已经自动推断的行
- * @param wb
- * @param excerptSize
- * @return
- */
- private def restIterator(wb: Workbook, excerptSize: Int): Iterator[org.apache.poi.ss.usermodel.Row] = {
- val sheet = findSheet(wb, sheetName)
- val i = sheet.iterator.asScala
- i.drop(excerptSize + 1)
- i
- }
-
- /**
- * 返回推断后的全部数据
- * @param workbook
- * @param firstRowWithData
- * @param excerpt
- * @return
- */
- private def dataIterator(workbook: Workbook, firstRowWithData: SheetRow, excerpt: List[SheetRow]) = {
- val init = if (useHeader) excerpt else firstRowWithData :: excerpt
- init.iterator ++ restIterator(workbook, excerpt.size)
- }
-
- private def getFirstRow(wb:Workbook):org.apache.poi.ss.usermodel.Row ={
- val names = sheetName.getOrElse("sheet1")
- var sheet:Sheet = null
- if("sheet1".equals(names) || ! names.contains(',')){
- sheet = findSheet(wb, sheetName)
- } else {
- val sheetName = names.split(',')(0)
- sheet = findSheet(wb,Some(sheetName))
- }
- val i = sheet.iterator.asScala
- if(i.hasNext)i.next() else null
- }
-
-
- /**
- * 遍历所有sheet
- * @param wb
- * @return
- */
- private def dataAllIterator(wb: Workbook): Iterator[org.apache.poi.ss.usermodel.Row] = {
- val names = sheetName.getOrElse("sheet1")
- if("sheet1".equals(names) || ! names.contains(',')){
- val sheet = findSheet(wb, sheetName)
- val i = sheet.iterator.asScala
- if (useHeader) i.drop(1)
- i
- } else {
- val sheetNames = names.split(',')
- var resIterator = Iterator[org.apache.poi.ss.usermodel.Row]()
- for(name <- sheetNames){
- val sheet = findSheet(wb, Some(name))
- val i = sheet.iterator.asScala
- if (useHeader) i.drop(1)
- resIterator = resIterator ++ i
- }
- resIterator
- }
- }
-
- override val schema: StructType = inferSchema
-
- /**
- * 数据格式转换
- */
- val dataFormatter = new DataFormatter()
-
- //val dateFormat: SimpleDateFormat = new SimpleDateFormat(parameters.getOrElse( "dateFormat", "yyyy-MM-dd"), Locale.US)
-
- val nullValue =parameters.getOrElse( "nullValue", " ")
- val nanValue = parameters.getOrElse( "nanValue", "null")
- val quote = parameters.getOrElse( "quote", "\"")
- val escape = parameters.getOrElse( "escape", "\\")
- val escapeQuotes = parameters.getOrElse("escapeQuotes", false)
-
- val timestampParser = if (timestampFormat.isDefined) {
- Some(new SimpleDateFormat(timestampFormat.get))
- } else {
- None
- }
-
-
- /**
- * 获得对应的sheet
- * @param workBook
- * @param sheetName
- * @return
- */
- private def findSheet(workBook: Workbook, sheetName: Option[String]): Sheet = {
- sheetName
- .map { sn =>
- Option(workBook.getSheet(sn)).getOrElse(throw new IllegalArgumentException(s"Unknown sheet $sn"))
- }
- .getOrElse(workBook.sheetIterator.next)
- }
-
- /**
- * build Row
- * @return
- */
- override def buildScan: RDD[Row] = buildScan(schema.map(_.name).toArray)
-
- override def buildScan(requiredColumns: Array[String]): RDD[Row] = {
- val lookups = requiredColumns
- .map { c =>
- val columnNameRegex = s"(.*?)(_color)?".r
- val columnNameRegex(columnName, isColor) = c
- val columnIndex = schema.indexWhere(_.name == columnName)
-
- val cellExtractor: Cell => Any = if (isColor == null) {
- castTo(_, schema(columnIndex).dataType,new SimpleDateFormat(dateFormat(columnIndex),Locale.US),null)
- } else {
- _.getCellStyle.getFillForegroundColorColor match {
- case null => ""
- case c: org.apache.poi.xssf.usermodel.XSSFColor => c.getARGBHex
- case c => throw new RuntimeException(s"Unknown color type $c: ${c.getClass}")
- }
- }
- { row: SheetRow =>
- val index = indexes(columnIndex)
- val cell = row.getCell(index + startColumn)
- if (cell == null) {
- null
- } else {
- cellExtractor(cell)
- }
- }
- }
- .to[Vector]
- val workbook = openWorkbook()
- val rows = dataAllIterator(workbook).map(row => lookups.map(l => l(row)))
- val result = rows.to[Vector]
- val rdd = sqlContext.sparkContext.parallelize(result.map(Row.fromSeq))
- workbook.close()
- rdd
- }
-
- private def stringToDouble(value: String): Double = {
- Try(value.toDouble) match {
- case Success(d) => d
- case Failure(_) => Double.NaN
- }
- }
-
- private def castTo(cell: Cell, castType: DataType,dateFormat: SimpleDateFormat,timestampFormat:SimpleDateFormat): Any = {
- val cellType = cell.getCellTypeEnum
- if (cellType == CellType.BLANK) {
- return null
- }
-
- lazy val dataFormatter = new DataFormatter()
- lazy val stringValue =
- cell.getCellTypeEnum match {
- case CellType.FORMULA =>
- cell.getCachedFormulaResultTypeEnum match {
- case CellType.STRING => cell.getRichStringCellValue.getString
- case CellType.NUMERIC => cell.getNumericCellValue.toString
- case _ => dataFormatter.formatCellValue(cell)
- }
- case CellType.ERROR => ""
- case _ => dataFormatter.formatCellValue(cell)
- }
- lazy val numericValue =
- cell.getCellTypeEnum match {
- case CellType.NUMERIC => cell.getNumericCellValue
- case CellType.STRING => stringToDouble(cell.getStringCellValue)
- case CellType.FORMULA =>
- cell.getCachedFormulaResultTypeEnum match {
- case CellType.NUMERIC => cell.getNumericCellValue
- case CellType.STRING => stringToDouble(cell.getRichStringCellValue.getString)
- }
- }
- lazy val bigDecimal = new BigDecimal(numericValue)
- castType match {
- case _: ByteType => numericValue.toByte
- case _: ShortType => numericValue.toShort
- case _: IntegerType => numericValue.toInt
- case _: LongType => numericValue.toLong
- case _: FloatType => numericValue.toFloat
- case _: DoubleType => numericValue
- case _: BooleanType => cell.getBooleanCellValue
- case _: DecimalType => if (cellType == CellType.STRING && (cell.getStringCellValue == "" || "null".equalsIgnoreCase(cell.getStringCellValue))) null else bigDecimal
- case _: TimestampType =>
- cellType match {
- case CellType.NUMERIC => new Timestamp(DateUtil.getJavaDate(numericValue).getTime)
- case _ => parseTimestamp(stringValue)
- }
- case _: DateType =>
- if("null".equalsIgnoreCase(cell.getStringCellValue)||StringUtils.isEmpty(cell.getStringCellValue)){
- null
- }else{
- new java.sql.Date(Try(dateFormat.parse(cell.getStringCellValue).getTime).getOrElse(DateUtil.getJavaDate(numericValue).getTime))}
-
- case _: StringType => stringValue.replaceAll("\n|\t", " ")
- case t => throw new RuntimeException(s"Unsupported cast from $cell to $t")
- }
- }
-
- private def parseTimestamp(stringValue: String): Timestamp = {
- timestampParser match {
- case Some(parser) => new Timestamp(parser.parse(stringValue).getTime)
- case None => Timestamp.valueOf(stringValue)
- }
- }
-
- private def getSparkType(cell: Option[Cell]): DataType = {
- cell match {
- case Some(c) =>
- c.getCellTypeEnum match {
- case CellType.FORMULA =>
- c.getCachedFormulaResultTypeEnum match {
- case CellType.STRING => StringType
- case CellType.NUMERIC => DoubleType
- case _ => NullType
- }
- case CellType.STRING if c.getStringCellValue == "" || "null".equalsIgnoreCase(c.getStringCellValue) => NullType
- case CellType.STRING => StringType
- case CellType.BOOLEAN => BooleanType
- case CellType.NUMERIC => if (DateUtil.isCellDateFormatted(c)) TimestampType else DoubleType
- case CellType.BLANK => NullType
- }
- case None => NullType
- }
- }
-
- private def parallelize[T : scala.reflect.ClassTag](seq: Seq[T]): RDD[T] = sqlContext.sparkContext.parallelize(seq)
-
- /**
- * Generates a header from the given row which is null-safe and duplicate-safe.
- */
- protected def makeSafeHeader(row: Array[String]): Array[String] = {
- if (useHeader) {
- val duplicates = {
- val headerNames = row
- .filter(_ != null)
- headerNames.diff(headerNames.distinct).distinct
- }
-
- row.zipWithIndex.map {
- case (value, index) =>
- if (value == null || value.isEmpty) {
- // When there are empty strings or the, put the index as the suffix.
- s"_c$index"
- } else if (duplicates.contains(value)) {
- // When there are duplicates, put the index as the suffix.
- s"$value$index"
- } else {
- value
- }
- }
- } else {
- row.zipWithIndex.map {
- case (_, index) =>
- // Uses default column names, "_c#" where # is its position of fields
- // when header option is disabled.
- s"_c$index"
- }
- }
- }
-
- private def inferSchema(): StructType = this.userSchema.getOrElse {
- val (firstRowWithData, excerpt) = getExcerpt()
-
- val rawHeader = extractCells(firstRowWithData).map {
- case Some(value) => value.getStringCellValue
- case _ => ""
- }.toArray
-
- val header = makeSafeHeader(rawHeader)
-
- val baseSchema = if (this.inferSheetSchema) {
- val stringsAndCellTypes = excerpt
- .map(r => extractCells(r).map(getSparkType))
- InferSchema(parallelize(stringsAndCellTypes), header.toArray)
- } else {
- // By default fields are assumed to be StringType
- val schemaFields = header.map { fieldName =>
- StructField(fieldName.toString, StringType, nullable = true)
- }
- StructType(schemaFields)
- }
- if (addColorColumns) {
- header.foldLeft(baseSchema) { (schema, header) =>
- schema.add(s"${header}_color", StringType, nullable = true)
- }
- } else {
- baseSchema
- }
- }
-}
diff --git a/linkis-extensions/spark-excel/src/main/scala/com/webank/wedatasphere/spark/excel/InferSchema.scala b/linkis-extensions/spark-excel/src/main/scala/com/webank/wedatasphere/spark/excel/InferSchema.scala
deleted file mode 100644
index 2501774..0000000
--- a/linkis-extensions/spark-excel/src/main/scala/com/webank/wedatasphere/spark/excel/InferSchema.scala
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Copyright 2019 WeBank
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.webank.wedatasphere.spark.excel
-
-import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.types._
-
-
-private[excel] object InferSchema {
-
- type CellType = Int
-
- /**
- * Similar to the JSON schema inference.
- * [[org.apache.spark.sql.execution.datasources.json.InferSchema]]
- * 1. Infer type of each row
- * 2. Merge row types to find common type
- * 3. Replace any null types with string type
- */
- def apply(rowsRDD: RDD[Seq[DataType]], header: Array[String]): StructType = {
- val startType: Array[DataType] = Array.fill[DataType](header.length)(NullType)
- val rootTypes: Array[DataType] = rowsRDD.aggregate(startType)(inferRowType _, mergeRowTypes)
-
- val structFields = header.zip(rootTypes).map {
- case (thisHeader, rootType) =>
- val dType = rootType match {
- case z: NullType => StringType
- case other => other
- }
- StructField(thisHeader, dType, nullable = true)
- }
- StructType(structFields)
- }
-
- private def inferRowType(rowSoFar: Array[DataType], next: Seq[DataType]): Array[DataType] = {
- var i = 0
- while (i < math.min(rowSoFar.length, next.size)) { // May have columns on right missing.
- rowSoFar(i) = inferField(rowSoFar(i), next(i))
- i += 1
- }
- rowSoFar
- }
-
- private[excel] def mergeRowTypes(first: Array[DataType], second: Array[DataType]): Array[DataType] = {
- first.zipAll(second, NullType, NullType).map {
- case ((a, b)) =>
- findTightestCommonType(a, b).getOrElse(NullType)
- }
- }
-
- /**
- * Infer type of string field. Given known type Double, and a string "1", there is no
- * point checking if it is an Int, as the final type must be Double or higher.
- */
- private[excel] def inferField(typeSoFar: DataType, field: DataType): DataType = {
- // Defining a function to return the StringType constant is necessary in order to work around
- // a Scala compiler issue which leads to runtime incompatibilities with certain Spark versions;
- // see issue #128 for more details.
- def stringType(): DataType = {
- StringType
- }
-
- if (field == NullType) {
- typeSoFar
- } else {
- (typeSoFar, field) match {
- case (NullType, ct) => ct
- case (DoubleType, DoubleType) => DoubleType
- case (BooleanType, BooleanType) => BooleanType
- case (TimestampType, TimestampType) => TimestampType
- case (StringType, _) => stringType()
- case (_, _) => stringType()
- }
- }
- }
-
-
- private val numericPrecedence: IndexedSeq[DataType] =
- IndexedSeq[DataType](ByteType, ShortType, IntegerType, LongType, FloatType, DoubleType, TimestampType)
-
-
- val findTightestCommonType: (DataType, DataType) => Option[DataType] = {
- case (t1, t2) if t1 == t2 => Some(t1)
- case (NullType, t1) => Some(t1)
- case (t1, NullType) => Some(t1)
- case (StringType, t2) => Some(StringType)
- case (t1, StringType) => Some(StringType)
-
- // Promote numeric types to the highest of the two and all numeric types to unlimited decimal
- case (t1, t2) if Seq(t1, t2).forall(numericPrecedence.contains) =>
- val index = numericPrecedence.lastIndexWhere(t => t == t1 || t == t2)
- Some(numericPrecedence(index))
-
- case _ => None
- }
-}
diff --git a/linkis-extensions/spark-excel/src/main/scala/com/webank/wedatasphere/spark/excel/package.scala b/linkis-extensions/spark-excel/src/main/scala/com/webank/wedatasphere/spark/excel/package.scala
deleted file mode 100644
index adbf7e5..0000000
--- a/linkis-extensions/spark-excel/src/main/scala/com/webank/wedatasphere/spark/excel/package.scala
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright 2019 WeBank
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.webank.wedatasphere.spark
-
-import org.apache.poi.ss.usermodel.Row.MissingCellPolicy
-import org.apache.poi.ss.usermodel.{Cell, Row}
-
-package object excel {
-
- implicit class RichRow(val row: Row) extends AnyVal {
-
- def eachCellIterator(startColumn: Int, endColumn: Int): Iterator[Option[Cell]] = new Iterator[Option[Cell]] {
- private val lastCellInclusive = row.getLastCellNum - 1
- private val endCol = Math.min(endColumn, Math.max(startColumn, lastCellInclusive))
- require(startColumn >= 0 && startColumn <= endCol)
-
- private var nextCol = startColumn
-
- override def hasNext: Boolean = nextCol <= endCol && nextCol <= lastCellInclusive
-
- override def next(): Option[Cell] = {
- val next =
- if (nextCol > endCol) throw new NoSuchElementException(s"column index = $nextCol")
- else Option(row.getCell(nextCol, MissingCellPolicy.RETURN_NULL_AND_BLANK))
- nextCol += 1
- next
- }
- }
-
- }
-
-}
diff --git a/linkis-orchestrator/linkis-code-orchestrator/pom.xml b/linkis-orchestrator/linkis-code-orchestrator/pom.xml
index 5277210..24e4c6d 100644
--- a/linkis-orchestrator/linkis-code-orchestrator/pom.xml
+++ b/linkis-orchestrator/linkis-code-orchestrator/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-orchestrator/linkis-code-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/code/plans/logical/CodeLogicalUnitTaskDesc.scala b/linkis-orchestrator/linkis-code-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/code/plans/logical/CodeLogicalUnitTaskDesc.scala
index 88719e1..53b7b27 100644
--- a/linkis-orchestrator/linkis-code-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/code/plans/logical/CodeLogicalUnitTaskDesc.scala
+++ b/linkis-orchestrator/linkis-code-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/code/plans/logical/CodeLogicalUnitTaskDesc.scala
@@ -16,11 +16,18 @@
package com.webank.wedatasphere.linkis.orchestrator.code.plans.logical
-import com.webank.wedatasphere.linkis.orchestrator.plans.ast.Stage
-import com.webank.wedatasphere.linkis.orchestrator.plans.logical.{StageTaskDesc, TaskDesc}
+import com.webank.wedatasphere.linkis.orchestrator.plans.ast.{ASTOrchestration, Job}
+import com.webank.wedatasphere.linkis.orchestrator.plans.logical.{Origin, TaskDesc}
-case class CodeLogicalUnitTaskDesc(override val stage: Stage) extends StageTaskDesc {
- override val position: Int = 0
- override def copy(): TaskDesc = CodeLogicalUnitTaskDesc(stage)
+case class CodeLogicalUnitTaskDesc(job: Job) extends TaskDesc {
+
+ private val position: Int = 0
+
+ override def copy(): TaskDesc = CodeLogicalUnitTaskDesc(job)
+
+ private val origin = Origin(job, position)
+
+ override def getOrigin: Origin = origin
+
}
diff --git a/linkis-orchestrator/linkis-computation-orchestrator/pom.xml b/linkis-orchestrator/linkis-computation-orchestrator/pom.xml
index 3486a7c..cc4ab4c 100644
--- a/linkis-orchestrator/linkis-computation-orchestrator/pom.xml
+++ b/linkis-orchestrator/linkis-computation-orchestrator/pom.xml
@@ -22,7 +22,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/ComputationOrchestratorSessionFactory.scala b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/ComputationOrchestratorSessionFactory.scala
index ee6f952..b37e632 100644
--- a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/ComputationOrchestratorSessionFactory.scala
+++ b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/ComputationOrchestratorSessionFactory.scala
@@ -35,7 +35,7 @@
def createSessionBuilder(id: String): OrchestratorSessionBuilder
- def getOrCreateSession(orchestratorSessionBuider: OrchestratorSessionBuilder): OrchestratorSession
+ def getOrCreateSession(orchestratorSessionBuilder: OrchestratorSessionBuilder): OrchestratorSession
}
object ComputationOrchestratorSessionFactory {
diff --git a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/ComputationOrchestratorSessionFactoryImpl.scala b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/ComputationOrchestratorSessionFactoryImpl.scala
index ef3867d..f8d3f68 100644
--- a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/ComputationOrchestratorSessionFactoryImpl.scala
+++ b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/ComputationOrchestratorSessionFactoryImpl.scala
@@ -100,14 +100,20 @@
override def apply(v1: CatalystExtensions): Unit = {
v1.injectConverterTransform(codeConverterTransformBuilder)
+
v1.injectParserTransform(enrichLabelParserTransformBuilder)
+
v1.injectParserTransform(codeStageParserTransformBuilder)
+
v1.injectPlannerTransform(taskPlannerTransformBuilder)
+
//v1.injectOptimizerTransform(simplyOptimizerTransformBuilder)
+
v1.injectPhysicalTransform(jobExecTaskTransformBuilder)
v1.injectPhysicalTransform(stageExecTaskTransformBuilder)
v1.injectPhysicalTransform(cacheExecTaskTransformBuilder)
v1.injectPhysicalTransform(codeExecTaskTransformBuilder)
+
v1.injectReheaterTransform(PruneTaskRetryTransformBuilder)
}
}
diff --git a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/catalyst/optimizer/PlaybackOptimizer.scala b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/catalyst/optimizer/PlaybackOptimizer.scala
index 01bf26e..cb07150 100644
--- a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/catalyst/optimizer/PlaybackOptimizer.scala
+++ b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/catalyst/optimizer/PlaybackOptimizer.scala
@@ -28,8 +28,8 @@
*/
class PlaybackOptimizer extends OptimizerTransform with Logging {
- override def apply(in: Task, context: LogicalContext): Task = ???
+ override def apply(in: Task, context: LogicalContext): Task = null
- override def getName: String = ???
+ override def getName: String = "PlaybackOptimizer"
}
diff --git a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/catalyst/optimizer/YarnQueueOptimizer.scala b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/catalyst/optimizer/YarnQueueOptimizer.scala
index c575605..12bf651 100644
--- a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/catalyst/optimizer/YarnQueueOptimizer.scala
+++ b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/catalyst/optimizer/YarnQueueOptimizer.scala
@@ -28,7 +28,7 @@
*/
class YarnQueueOptimizer extends OptimizerTransform with Logging {
- override def apply(in: Task, context: LogicalContext): Task = ???
+ override def apply(in: Task, context: LogicalContext): Task = null
- override def getName: String = ???
+ override def getName: String = "YarnQueueOptimizer"
}
diff --git a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/catalyst/physical/AbstractPhysicalTransform.scala b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/catalyst/physical/AbstractPhysicalTransform.scala
index 8620952..4063891 100644
--- a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/catalyst/physical/AbstractPhysicalTransform.scala
+++ b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/catalyst/physical/AbstractPhysicalTransform.scala
@@ -157,25 +157,25 @@
override def canExecute: Boolean = false
- override def execute(): TaskResponse = ???
+ override def execute(): TaskResponse = null
- override def isLocalMode: Boolean = ???
+ override def isLocalMode: Boolean = true
- override def getPhysicalContext: PhysicalContext = ???
+ override def getPhysicalContext: PhysicalContext = null
- override def initialize(physicalContext: PhysicalContext): Unit = ???
+ override def initialize(physicalContext: PhysicalContext): Unit = {}
- override def getParents: Array[ExecTask] = ???
+ override def getParents: Array[ExecTask] = null
- override def getChildren: Array[ExecTask] = ???
+ override def getChildren: Array[ExecTask] = null
- override protected def newNode(): ExecTask = ???
+ override protected def newNode(): ExecTask = null
- override def verboseString: String = ???
+ override def verboseString: String = null
- override def withNewChildren(children: Array[ExecTask]): Unit = ???
+ override def withNewChildren(children: Array[ExecTask]): Unit = {}
- override def withNewParents(parents: Array[ExecTask]): Unit = ???
+ override def withNewParents(parents: Array[ExecTask]): Unit = {}
- override def getId: String = ???
+ override def getId: String = "id"
}
diff --git a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/catalyst/planner/TaskPlannerTransform.scala b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/catalyst/planner/TaskPlannerTransform.scala
index 2bade28..c8cd616 100644
--- a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/catalyst/planner/TaskPlannerTransform.scala
+++ b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/catalyst/planner/TaskPlannerTransform.scala
@@ -33,32 +33,35 @@
*/
class TaskPlannerTransform extends PlannerTransform with Logging {
- private var startJobTask: JobTask = _
-
def rebuildTreeNode(tmpTask: Task): Task = {
- tmpTask.getChildren.foreach(_.withNewParents(Array(tmpTask)))
+ tmpTask.getChildren.foreach(child => {
+ val newParents = child.getParents.clone() :+ tmpTask
+ child.withNewParents(newParents)
+ })
tmpTask
}
- def buildCodeLogicTaskTree(codeLogicalUnit: CodeLogicalUnit = null, stage: Stage): Task = {
- val codeLogicalUnitTaskTmp = new CodeLogicalUnitTask(Array(), Array(buildStageTaskTree(StartStageTaskDesc(stage))))
- codeLogicalUnitTaskTmp.setTaskDesc(CodeLogicalUnitTaskDesc(stage))
+ def buildCodeLogicTaskTree(codeLogicalUnit: CodeLogicalUnit = null, stage: Stage, startJobTask: Task = null): (Task, Task) = {
+ val (stageTask, newStartJobTask) = buildStageTaskTree(StartStageTaskDesc(stage), startJobTask)
+ val codeLogicalUnitTaskTmp = new CodeLogicalUnitTask(Array(), Array(stageTask))
+ codeLogicalUnitTaskTmp.setTaskDesc(CodeLogicalUnitTaskDesc(stage.getJob))
if(codeLogicalUnit != null) codeLogicalUnitTaskTmp.setCodeLogicalUnit(codeLogicalUnit)
- rebuildTreeNode(codeLogicalUnitTaskTmp)
+ (rebuildTreeNode(codeLogicalUnitTaskTmp), newStartJobTask)
}
- def buildStageTaskTree(taskDesc: StageTaskDesc): Task = {
+ def buildStageTaskTree(taskDesc: StageTaskDesc, startJobTask: Task = null): (Task,Task) = {
taskDesc match {
case endStageTask: EndStageTaskDesc => {
- val stageTaskTmp = new StageTask(Array(), Array(buildCodeLogicTaskTree(taskDesc.stage.getJob match{
+ val (task, newStartJobTask) = buildCodeLogicTaskTree(taskDesc.stage.getJob match{
case codeJob: CodeJob => codeJob.getCodeLogicalUnit
case job: Job => {
error(s"jobId:${job.getId}-----jobType:${job.getName}, job type mismatch, only support CodeJob")
null
}
- }, taskDesc.stage)))
+ }, taskDesc.stage, startJobTask)
+ val stageTaskTmp = new StageTask(Array(), Array(task))
stageTaskTmp.setTaskDesc(endStageTask)
- rebuildTreeNode(stageTaskTmp)
+ (rebuildTreeNode(stageTaskTmp), newStartJobTask)
}
case startStageTask: StartStageTaskDesc => {
/** when the construction node arrives at stage-task-start
@@ -66,28 +69,38 @@
* if true -> use the same way to build another stage tasks
* if false -> build or reuse job-task-start and points to the stage-task-start
* */
- if(null == taskDesc.stage.getChildren || taskDesc.stage.getChildren.isEmpty){
+ if(null == startStageTask.stage.getChildren || startStageTask.stage.getChildren.isEmpty){
+ var newStartJobTask: Task = null
val stageTaskTmp = new StageTask(Array(),
- if(startJobTask != null) Array(startJobTask)
- else Array(buildJobTaskTree(StartJobTaskDesc(startStageTask.stage.getJob))))
+ if(startJobTask == null) {
+ newStartJobTask = buildJobTaskTree(StartJobTaskDesc(startStageTask.stage.getJob))
+ Array(newStartJobTask)
+ }
+ else {
+ newStartJobTask = startJobTask
+ Array(newStartJobTask)
+ })
stageTaskTmp.setTaskDesc(startStageTask)
- rebuildTreeNode(stageTaskTmp)
+ (rebuildTreeNode(stageTaskTmp), newStartJobTask)
}else{
- val stageTaskTmp = new StageTask(Array(), buildAllStageTaskTree(taskDesc.stage.getChildren))
+ val (stageTasks, newStartJobTask) = buildAllStageTaskTree(taskDesc.stage.getChildren, startJobTask)
+ val stageTaskTmp = new StageTask(Array(), stageTasks)
stageTaskTmp.setTaskDesc(taskDesc)
- rebuildTreeNode(stageTaskTmp)
+ (rebuildTreeNode(stageTaskTmp), newStartJobTask)
}
}
}
}
- def buildAllStageTaskTree(stages: Array[Stage]): Array[Task] = {
+ def buildAllStageTaskTree(stages: Array[Stage], startJobTask: Task = null): (Array[Task], Task) = {
val stageTasks = ArrayBuffer[Task]()
- stages.foreach(stage => {
- val stageTask = buildStageTaskTree(EndStageTaskDesc(stage))
- stageTasks += stageTask
- })
- stageTasks.toArray
+ var reusedStartJobTask: Task = startJobTask
+ stages.foreach(stage => {
+ val (stageTask, startJobTask) = buildStageTaskTree(EndStageTaskDesc(stage), reusedStartJobTask)
+ reusedStartJobTask = startJobTask
+ stageTasks += stageTask
+ })
+ (stageTasks.toArray, reusedStartJobTask)
}
def buildJobTaskTree(taskDesc: TaskDesc): Task = {
@@ -98,11 +111,10 @@
*/
val jobTask = new JobTask(Array(), Array())
jobTask.setTaskDesc(startTask)
- startJobTask = jobTask
jobTask
}
case endTask: EndJobTaskDesc => {
- val jobTaskTmp = new JobTask(Array(), buildAllStageTaskTree(endTask.job.getAllStages))
+ val jobTaskTmp = new JobTask(Array(), buildAllStageTaskTree(endTask.job.getAllStages)._1)
jobTaskTmp.setTaskDesc(endTask)
rebuildTreeNode(jobTaskTmp)
}
diff --git a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/catalyst/reheater/PruneTaskReheaterTransform.scala b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/catalyst/reheater/PruneTaskReheaterTransform.scala
index 2b4dee0..621d747 100644
--- a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/catalyst/reheater/PruneTaskReheaterTransform.scala
+++ b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/catalyst/reheater/PruneTaskReheaterTransform.scala
@@ -18,11 +18,13 @@
package com.webank.wedatasphere.linkis.orchestrator.computation.catalyst.reheater
import com.webank.wedatasphere.linkis.common.exception.LinkisRetryException
+import com.webank.wedatasphere.linkis.common.log.LogUtils
import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
import com.webank.wedatasphere.linkis.orchestrator.computation.conf.ComputationOrchestratorConf
import com.webank.wedatasphere.linkis.orchestrator.computation.utils.TreeNodeUtil
import com.webank.wedatasphere.linkis.orchestrator.execution.FailedTaskResponse
import com.webank.wedatasphere.linkis.orchestrator.extensions.catalyst.ReheaterTransform
+import com.webank.wedatasphere.linkis.orchestrator.listener.task.TaskLogEvent
import com.webank.wedatasphere.linkis.orchestrator.plans.physical.{ExecTask, PhysicalContext, PhysicalOrchestration, ReheatableExecTask, RetryExecTask}
/**
@@ -30,10 +32,11 @@
*
*/
class PruneTaskRetryTransform extends ReheaterTransform with Logging{
+
override def apply(in: ExecTask, context: PhysicalContext): ExecTask = {
val failedTasks = TreeNodeUtil.getAllFailedTaskNode(in)
failedTasks.foreach(task => {
- info(s"task:${in.getId} has ${failedTasks.size} child tasks which execute failed, some of them may be retried")
+ info(s"task:${in.getIDInfo()} has ${failedTasks.size} child tasks which execute failed, some of them may be retried")
TreeNodeUtil.getTaskResponse(task) match {
case response: FailedTaskResponse => {
val exception = response.getCause
@@ -46,12 +49,13 @@
Utils.tryCatch{
task match {
case retryExecTask: RetryExecTask => {
- if (retryExecTask.getAge() < Integer.parseInt(ComputationOrchestratorConf.RETRYTASK_MAXIMUM_AGE.getValue)) {
- val newTask = new RetryExecTask(task, retryExecTask.getAge() + 1)
+ if (retryExecTask.getAge() < ComputationOrchestratorConf.RETRYTASK_MAXIMUM_AGE.getValue) {
+ val newTask = new RetryExecTask(retryExecTask.getOriginTask, retryExecTask.getAge() + 1)
newTask.initialize(retryExecTask.getPhysicalContext)
TreeNodeUtil.replaceNode(retryExecTask, newTask)
TreeNodeUtil.removeTaskResponse(retryExecTask)
- info(s"Retry---success to rebuild task node, retry-task:${retryExecTask.getId}, current age is ${newTask.getAge()} ")
+ val logEvent = TaskLogEvent(task, LogUtils.generateInfo(s"Retry---success to rebuild task node:${task.getIDInfo()}, ready to execute new retry-task:${retryExecTask.getIDInfo}, current age is ${newTask.getAge()} "))
+ task.getPhysicalContext.pushLog(logEvent)
}else{
info(s"Retry task: ${retryExecTask.getId} reached maximum age:${retryExecTask.getAge()}, stop to retry it!")
}
@@ -61,17 +65,20 @@
retryExecTask.initialize(task.getPhysicalContext)
TreeNodeUtil.insertNode(parent, task, retryExecTask)
TreeNodeUtil.removeTaskResponse(task)
- info(s"Retry---success to rebuild task node, retry-task:${task.getId}, current age is ${retryExecTask.getAge()} ")
+ val logEvent = TaskLogEvent(task, LogUtils.generateInfo(s"Retry---success to rebuild task node:${task.getIDInfo}, ready to execute new retry-task:${retryExecTask.getIDInfo}, current age is ${retryExecTask.getAge()} "))
+ task.getPhysicalContext.pushLog(logEvent)
}
}
}{
//restore task node when retry task construction failed
case e: Exception => {
- warn(s"Retry task construction failed, start to restore task node, task node: ${task.getId}, " +
- s"age: ${task match { case retryExecTask: RetryExecTask => retryExecTask.getAge() case _ => 0}}, reason: ${e.getMessage}")
+ val logEvent = TaskLogEvent(task, LogUtils.generateWarn(s"Retry task construction failed, start to restore task node, task node: ${task.getIDInfo}, " +
+ s"age: ${task match { case retryExecTask: RetryExecTask => retryExecTask.getAge() case _ => 0}}, reason: ${e.getMessage}"))
+ task.getPhysicalContext.pushLog(logEvent)
parent.withNewChildren(otherChildren :+ task)
task.withNewParents(otherParents :+ parent)
- info(s"restore task success! task node: ${task.getId}")
+ val downLogEvent = TaskLogEvent(task, LogUtils.generateWarn(s"restore task success! task node: ${task.getIDInfo}"))
+ task.getPhysicalContext.pushLog(downLogEvent)
}
}
})
diff --git a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/catalyst/validator/LabelRegularCheckRuler.scala b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/catalyst/validator/LabelRegularCheckRuler.scala
index c3888f0..1d2c85e 100644
--- a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/catalyst/validator/LabelRegularCheckRuler.scala
+++ b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/catalyst/validator/LabelRegularCheckRuler.scala
@@ -44,9 +44,9 @@
case stage: Stage =>
val stageId = stage.getId
val jobId = stage.getJob.getId
- info(s"${getName}:start to check labels,jobId:${jobId}---stageId:${stageId} ")
+ debug(s"${getName}:start to check labels,jobId:${jobId}---stageId:${stageId} ")
case jobId: Job =>
- info(s"${getName}:start to check labels,jobId:${jobId}")
+ debug(s"${getName}:start to check labels,jobId:${jobId}")
case _ =>
}
val queryLabels = context.getLabels
@@ -55,7 +55,7 @@
val userDefinedLabel = defaultLabel ++ customLabel
userDefinedLabel.foreach(needLabel => {
if(!queryLabels.asScala.exists(_.getClass == needLabel.getClass)){
- warn(s"label:${needLabel.getClass.getCanonicalName} is needed, but there is no definition in the requested labels!" +
+ warn(s"label:${needLabel.getClass.getName} is needed, but there is no definition in the requested labels!" +
s"(请求的标签列表中缺少标签:${needLabel.getLabelKey})!")
missingLabel += needLabel
checkResult = false
diff --git a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/conf/ComputationOrchestratorConf.scala b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/conf/ComputationOrchestratorConf.scala
index 7e60a0e..19bcb5a 100644
--- a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/conf/ComputationOrchestratorConf.scala
+++ b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/conf/ComputationOrchestratorConf.scala
@@ -47,7 +47,7 @@
val LOG_LEN = CommonVars("wds.linkis.computation.orchestrator.log.len", 100)
- val RETRYTASK_MAXIMUM_AGE = CommonVars("wds.linkis.computation.orchestrator.retry.max.age", "3")
+ val RETRYTASK_MAXIMUM_AGE = CommonVars("wds.linkis.computation.orchestrator.retry.max.age", 10)
val ENGINECONN_LASTUPDATE_TIMEOUT = CommonVars("wds.linkis.orchestrator.engine.lastupdate.timeout", new TimeType("5s"))
diff --git a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/execute/CodeExecTaskExecutor.scala b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/execute/CodeExecTaskExecutor.scala
index 962b7c7..4483142 100644
--- a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/execute/CodeExecTaskExecutor.scala
+++ b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/execute/CodeExecTaskExecutor.scala
@@ -42,5 +42,6 @@
def getMark: Mark = mark
+ override def toString: String = s"engineConn $engineConnExecutor execTask ${execTask.getIDInfo()} mark ${mark.getMarkId()} engineConnTaskId $engineConnTaskId"
}
diff --git a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/execute/CodeExecTaskExecutorManager.scala b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/execute/CodeExecTaskExecutorManager.scala
index 85bd92f..28a253d 100644
--- a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/execute/CodeExecTaskExecutorManager.scala
+++ b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/execute/CodeExecTaskExecutorManager.scala
@@ -52,10 +52,13 @@
def unLockEngineConn(execTask: CodeLogicalUnitExecTask, execTaskExecutor: CodeExecTaskExecutor): Unit
+ def markECFailed(execTask: CodeLogicalUnitExecTask, executor: CodeExecTaskExecutor): Unit
+
def getAllInstanceToExecutorCache(): mutable.HashMap[ServiceInstance, Array[CodeExecTaskExecutor]]
def getAllExecTaskToExecutorCache(): mutable.HashMap[String, CodeExecTaskExecutor]
+
}
object CodeExecTaskExecutorManager extends Logging{
diff --git a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/execute/DefaultCodeExecTaskExecutorManager.scala b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/execute/DefaultCodeExecTaskExecutorManager.scala
index 79a9a99..419e9e6 100644
--- a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/execute/DefaultCodeExecTaskExecutorManager.scala
+++ b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/execute/DefaultCodeExecTaskExecutorManager.scala
@@ -21,6 +21,7 @@
import com.webank.wedatasphere.linkis.common.ServiceInstance
import com.webank.wedatasphere.linkis.common.exception.{LinkisRetryException, WarnException}
+import com.webank.wedatasphere.linkis.common.log.LogUtils
import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
import com.webank.wedatasphere.linkis.manager.label.constant.LabelKeyConstant
import com.webank.wedatasphere.linkis.manager.label.entity.Label
@@ -31,6 +32,7 @@
import com.webank.wedatasphere.linkis.orchestrator.ecm.entity.{DefaultMarkReq, LoadBanlanceMarkReq, Mark, MarkReq, Policy}
import com.webank.wedatasphere.linkis.orchestrator.ecm.{EngineConnManager, EngineConnManagerBuilder}
import com.webank.wedatasphere.linkis.orchestrator.exception.{OrchestratorLabelConflictException, OrchestratorUseSameEngineException}
+import com.webank.wedatasphere.linkis.orchestrator.listener.task.TaskLogEvent
import org.apache.commons.lang.StringUtils
import scala.collection.JavaConverters._
@@ -59,7 +61,7 @@
private val waitLock = new Array[Byte](0)
override def askExecutor(execTask: CodeLogicalUnitExecTask, wait: Duration): Option[CodeExecTaskExecutor] = {
- info(s"Start to askExecutor for execId ${execTask.getId}, wait $wait")
+ info(s"Start to askExecutor for execId ${execTask.getIDInfo()}, wait $wait")
val startTime = System.currentTimeMillis()
var retryException: LinkisRetryException = null
var executor: Option[CodeExecTaskExecutor] = None
@@ -72,7 +74,7 @@
case t: Throwable => throw t
} match {
case Some(e) =>
- info(s"Finished to askExecutor for execId ${execTask.getId}")
+ info(s"Finished to askExecutor for execId ${execTask.getIDInfo()}, wait ${System.currentTimeMillis() - startTime}")
executor = Option(e)
case _ =>
if (System.currentTimeMillis - startTime < wait.toMillis) {
@@ -85,55 +87,38 @@
}
override def askExecutor(execTask: CodeLogicalUnitExecTask): Option[CodeExecTaskExecutor] = {
- info(s"Start to askExecutor for execId ${execTask.getId}")
+ debug(s"Start to askExecutor for execId ${execTask.getIDInfo()}")
val executor = createExecutor(execTask)
- info(s"Finished to askExecutor for execId ${execTask.getId}")
+ info(s"Finished to askExecutor for execId ${execTask.getIDInfo()}")
Option(executor)
}
override def createExecutor(execTask: CodeLogicalUnitExecTask): CodeExecTaskExecutor = {
- info(s"Start to createExecutor for execId ${execTask.getId}")
+
val engineConnManager = getEngineConnManager(execTask.getLabels)
// CreateMarkReq
val markReq = createMarkReq(execTask)
- // getMark
- var mark: Mark = null
+
val executeOnceLabel = LabelUtil.getExecuteOnceLabel(execTask.getLabels)
val loadBalanceLabel = LabelUtil.getLoadBalanceLabel(execTask.getLabels)
+
if (null != executeOnceLabel && null != loadBalanceLabel) {
throw new OrchestratorLabelConflictException(s"ExecuteOnceLabel : ${markReq.getLabels.get(LabelKeyConstant.EXECUTE_ONCE_KEY)} should not come with LoadBalanceLabel : ${markReq.getLabels.get(LabelKeyConstant.LOAD_BALANCE_KEY)}")
}
- if(executeOnceLabel != null){
- // todo check ExecuteOnceLabel should not come with LoadBalanceLabel And BindEngineLabel
- mark = engineConnManager.createMark(markReq)
- } else {
- mark = engineConnManager.applyMark(markReq)
- val bindEngineLabel = LabelUtil.getBindEngineLabel(execTask.getLabels)
- if (null != bindEngineLabel) {
- if (null == mark ) {
- if (bindEngineLabel.getIsJobGroupHead) {
- mark = engineConnManager.createMark(markReq)
- } else {
- // todo logupdate()
- throw new OrchestratorUseSameEngineException(s"ExecTask with id : ${execTask.getId} has bingEngineLabel : ${bindEngineLabel.getStringValue}, but cannot find the pre-built mark.")
- }
- }
- } else {
- if (null == mark) {
- mark = engineConnManager.createMark(markReq)
- }
- }
- }
+
+ // getMark
+ val mark: Mark = engineConnManager.applyMark(markReq)
+ markReq.setCreateService(markReq.getCreateService + s"mark_id: ${mark.getMarkId()}")
// getEngineConn Executor
- info(s"createExecutor for execId ${execTask.getId} mark id is ${mark.getMarkId()}, user ${mark.getMarkReq.getUser}")
- //TODO LOG job.getLogListener.foreach(_.onLogUpdate(job, "Background is starting a new engine for you, it may take several seconds, please wait"))
+ info(s"create Executor for execId ${execTask.getIDInfo()} mark id is ${mark.getMarkId()}, user ${mark.getMarkReq.getUser}")
+ execTask.getPhysicalContext.pushLog(TaskLogEvent(execTask, LogUtils.generateInfo("Background is starting a new engine for you, it may take several seconds, please wait")))
val engineConnExecutor = engineConnManager.getAvailableEngineConnExecutor(mark)
if (null == engineConnExecutor) {
return null
}
val codeExecTaskExecutor = new CodeExecTaskExecutor(engineConnExecutor, execTask, mark)
execTaskToExecutor.put(execTask.getId, codeExecTaskExecutor)
- info(s"Finished to createExecutor for execId ${execTask.getId}")
+ info(s"Finished to create Executor for execId ${execTask.getIDInfo()} mark id is ${mark.getMarkId()}, user ${mark.getMarkReq.getUser}")
codeExecTaskExecutor
}
@@ -143,9 +128,10 @@
val markReq: MarkReq = if (null != loadBalanceLabel) {
new LoadBanlanceMarkReq
} else {
- new DefaultMarkReq
+ val defaultMarkReq = new DefaultMarkReq
+ defaultMarkReq.registerLabelKey(LabelKeyConstant.BIND_ENGINE_KEY)
+ defaultMarkReq
}
- markReq.registerLabelKey(LabelKeyConstant.BIND_ENGINE_KEY)
markReq.setPolicyObj(Policy.Task)
markReq.setCreateService(ComputationOrchestratorConf.DEFAULT_CREATE_SERVICE.getValue)
//markReq.setDescription
@@ -189,55 +175,68 @@
}
+ /**
+ * Called when the job completes normally. This method inspects the bind engine
+ * label: if the job is not the end of its job group, no release is performed.
+ * @param execTask
+ * @param executor
+ */
override def delete(execTask: CodeLogicalUnitExecTask, executor: CodeExecTaskExecutor): Unit = {
val jobGroupLabel = LabelUtil.getBindEngineLabel(execTask.getLabels)
- /*val loadBalanceLabel = LabelUtil.getLoadBalanceLabel(execTask.getLabels)
- if (null != loadBalanceLabel) {
- info(s"${execTask.getIDInfo()} task has loadBalanceLabel, Not need to delete executor ${executor.getEngineConnExecutor.getServiceInstance}")
- return
- }*/
var isEndJob = false
var jobGroupId = ""
if (null != jobGroupLabel) {
isEndJob = jobGroupLabel.getIsJobGroupEnd
jobGroupId = jobGroupLabel.getJobGroupId
if (isEndJob) {
- info(s"To delete codeExecTaskExecutor $executor with id : ${executor.getEngineConnExecutor.getServiceInstance} from execTaskToExecutor for lastjob of jobGroupId : ${jobGroupId}")
- clearExecutorById(executor.getExecTaskId, execTask.getLabels)
+ debug(s"To delete codeExecTaskExecutor $executor from execTaskToExecutor for lastjob of jobGroupId : ${jobGroupId}")
+ clearExecutorById(executor, execTask.getLabels)
} else {
- info(s"Subjob is not end of JobGroup with id : ${jobGroupId}, we will not delete codeExecTaskExecutor with id : ${executor.getExecTaskId} ")
+ removeExecutorFromInstanceToExecutors(executor)
+ info(s"Subjob is not end of JobGroup with id : ${jobGroupId}, we will not delete codeExecTaskExecutor with id : ${executor} ")
}
} else {
- info(s"To delete codeExecTaskExecutor $executor with id : ${executor.getEngineConnExecutor.getServiceInstance} from execTaskToExecutor.")
- clearExecutorById(executor.getExecTaskId, execTask.getLabels)
+ debug(s"To delete codeExecTaskExecutor ${executor} from execTaskToExecutor.")
+ clearExecutorById(executor, execTask.getLabels)
}
}
- private def clearExecutorById(id: String, labels: util.List[Label[_]]) = {
- val maybeExecutor = execTaskToExecutor.remove(id)
- maybeExecutor.foreach { codeExecTaskExecutor =>
- info(s"Task $id To release engine ConnExecutor ${codeExecTaskExecutor.getEngineConnExecutor.getServiceInstance}")
- val loadBalanceLabel = LabelUtil.getLoadBalanceLabel(labels)
- if (null == loadBalanceLabel) {
- getEngineConnManager(labels).releaseEngineConnExecutor(codeExecTaskExecutor.getEngineConnExecutor, codeExecTaskExecutor.getMark)
- } else {
- info(s"${id} task has loadBalanceLabel, Not need to delete executor ${codeExecTaskExecutor.getEngineConnExecutor.getServiceInstance}")
- }
- removeExecutorFromInstanceToExecutors(codeExecTaskExecutor)
+ /**
+ * Cleans up the executor and triggers the unlock of the EC. If the task
+ * carries the loadBalance label, the unlock step is skipped (unless forceRelease is set).
+ * @param executor
+ * @param labels
+ */
+ private def clearExecutorById( executor: CodeExecTaskExecutor, labels: util.List[Label[_]], forceRelease: Boolean = false): Unit = {
+ if (null == executor || executor.getEngineConnExecutor == null) return
+ val loadBalanceLabel = LabelUtil.getLoadBalanceLabel(labels)
+ if (null == loadBalanceLabel || forceRelease) {
+ info(s"To release engine ConnExecutor ${executor}")
+ getEngineConnManager(labels).releaseEngineConnExecutor(executor.getEngineConnExecutor, executor.getMark)
+ } else {
+ info(s"Task has loadBalanceLabel, Not need to delete executor ${executor}")
}
+ removeExecutorFromInstanceToExecutors(executor)
}
- //private def getJobGroupIdKey(jobGroupId: String): String = LabelKeyConstant.JOB_GROUP_ID + "_" + jobGroupId
private def removeExecutorFromInstanceToExecutors(executor: CodeExecTaskExecutor): Unit = {
- info(s"To delete codeExecTaskExecutor $executor from instanceToExecutors")
+ debug(s"To delete codeExecTaskExecutor ${executor} from instanceToExecutors")
val maybeExecutors = instanceToExecutors.get(executor.getEngineConnExecutor.getServiceInstance)
if (maybeExecutors.isDefined) {
val executors = maybeExecutors.get.filter(_.getEngineConnTaskId != executor.getEngineConnTaskId)
- if (null != executors && executors.nonEmpty) {
- instanceToExecutors.put(executor.getEngineConnExecutor.getServiceInstance, executors)
+ instanceToExecutors synchronized {
+ if (null != executors && executors.nonEmpty) {
+ instanceToExecutors.put(executor.getEngineConnExecutor.getServiceInstance, executors)
+ } else {
+ instanceToExecutors.remove(executor.getEngineConnExecutor.getServiceInstance)
+ }
}
}
+ info(s"To delete exec task ${executor.getExecTask.getIDInfo()} and CodeExecTaskExecutor ${executor.getEngineConnExecutor.getServiceInstance} relation")
+ execTaskToExecutor synchronized {
+ execTaskToExecutor.remove(executor.getExecTaskId)
+ }
}
override def addEngineConnTaskID(executor: CodeExecTaskExecutor): Unit = {
@@ -247,7 +246,9 @@
execTaskToExecutor.put(executor.getExecTaskId, codeExecutor)
info(s"To add codeExecTaskExecutor $executor to instanceToExecutors")
val executors = instanceToExecutors.getOrElse(executor.getEngineConnExecutor.getServiceInstance, Array.empty[CodeExecTaskExecutor])
- instanceToExecutors.put(executor.getEngineConnExecutor.getServiceInstance, executors.+:(codeExecutor))
+ instanceToExecutors synchronized {
+ instanceToExecutors.put(executor.getEngineConnExecutor.getServiceInstance, executors.+:(codeExecutor))
+ }
}
}
@@ -263,9 +264,24 @@
override def getAllExecTaskToExecutorCache(): mutable.HashMap[String, CodeExecTaskExecutor] = execTaskToExecutor
+ /**
+ * If the job terminates abnormally (execution failure, or being killed),
+ * this cleanup path is taken and the engineConn lock is released.
+ * @param execTask
+ * @param execTaskExecutor
+ */
override def unLockEngineConn(execTask: CodeLogicalUnitExecTask, execTaskExecutor: CodeExecTaskExecutor): Unit = {
info(s"${execTask.getIDInfo()} task be killed or failed , Now to delete executor ${execTaskExecutor.getEngineConnExecutor.getServiceInstance}")
- clearExecutorById(execTaskExecutor.getExecTaskId, execTask.getLabels)
+ clearExecutorById(execTaskExecutor, execTask.getLabels)
}
+ /**
+ * The task failed because the EC exited unexpectedly, so the EC must be cleaned up immediately.
+ * @param execTask
+ * @param executor
+ */
+ override def markECFailed(execTask: CodeLogicalUnitExecTask, executor: CodeExecTaskExecutor): Unit = {
+ info(s"${execTask.getIDInfo()} task failed because executor exit, Now to delete executor ${executor.getEngineConnExecutor.getServiceInstance}")
+ clearExecutorById(executor, execTask.getLabels, true)
+ }
}
diff --git a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/monitor/EngineConnMonitor.scala b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/monitor/EngineConnMonitor.scala
index 8b17431..ee886bc 100644
--- a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/monitor/EngineConnMonitor.scala
+++ b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/monitor/EngineConnMonitor.scala
@@ -32,8 +32,9 @@
import com.webank.wedatasphere.linkis.server.{BDPJettyServerHelper, toJavaMap}
import java.util
+import java.util.Collections
import java.util.concurrent.TimeUnit
-import scala.collection.JavaConverters.mapAsScalaMapConverter
+import scala.collection.JavaConverters.{asScalaBufferConverter, asScalaSetConverter, mapAsScalaMapConverter}
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
@@ -46,19 +47,16 @@
override def run(): Unit = {
val startTime = System.currentTimeMillis()
val engineExecutorCache = engineConnExecutorCache
- if (engineExecutorCache.size > 0) {
- info(s"Entrance Executor cache num : ${engineExecutorCache.size}")
- }
- val unActivityEngines = new ArrayBuffer[EngineConnExecutor]()
+ val unActivityEngines = Collections.synchronizedSet(new util.HashSet[EngineConnExecutor]())
engineExecutorCache.values.foreach(taskExecutors => taskExecutors.foreach(executor => {
if (startTime - executor.getEngineConnExecutor.getLastUpdateTime() > ComputationOrchestratorConf.ENGINECONN_LASTUPDATE_TIMEOUT.getValue.toLong) {
- unActivityEngines += executor.getEngineConnExecutor
+ unActivityEngines.add(executor.getEngineConnExecutor)
}
}))
if (null != unActivityEngines && !unActivityEngines.isEmpty) {
info(s"There are ${unActivityEngines.size} unActivity engines.")
val engineList = new util.ArrayList[ServiceInstance]()
- unActivityEngines.foreach(engine => {
+ unActivityEngines.asScala.foreach(engine => {
engineList.add(engine.getServiceInstance)
if (engineList.size() >= GovernanceConstant.REQUEST_ENGINE_STATUS_BATCH_LIMIT) {
queryEngineStatusAndHandle(engineConnExecutorCache, engineList, endJobByEngineInstance)
@@ -72,7 +70,7 @@
}
val endTime = System.currentTimeMillis()
if (endTime - startTime >= ComputationOrchestratorConf.ENGINECONN_ACTIVITY_MONITOR_INTERVAL.getValue.toLong) {
- error("Query engines status costs longer time than query task interval, you should increase interval.")
+ warn("Query engines status costs longer time than query task interval, you should increase interval.")
}
}
}
@@ -91,12 +89,21 @@
info(s"ResponseEngineStatusBatch msg : ${response.msg}")
}
if (response.engineStatus.size() != requestEngineStatus.engineList.size()) {
- error("ResponseEngineStatusBatch engines size is not euqal requet.")
+ error(s"ResponseEngineStatusBatch engines size : ${response.engineStatus.size()} is not euqal requet : ${requestEngineStatus.engineList.size()}.")
+ val unKnownEngines = new ArrayBuffer[ServiceInstance]()
+ requestEngineStatus.engineList.asScala.foreach(instance => {
+ if (!response.engineStatus.containsKey(instance)) {
+ response.engineStatus.put(instance, NodeExistStatus.Unknown)
+ unKnownEngines += instance
+ }
+ })
+ val instances = unKnownEngines.map(_.getInstance).mkString(",")
+ warn(s"These engine instances cannot be found in manager : ${instances}")
}
response.engineStatus.asScala.foreach(status => {
status._2 match {
case NodeExistStatus.UnExist =>
- warn(s"Engine ${status._1.toString} is Failed, now go to clear its task.")
+ warn(s"Engine ${status._1} is Failed, now go to clear its task.")
endJobByEngineInstance(status._1)
case NodeExistStatus.Exist | NodeExistStatus.Unknown =>
val engineConnExecutor = engineConnExecutorCache.getOrDefault(status._1, null)
@@ -109,7 +116,9 @@
if (NodeStatus.isCompleted(rs.getNodeStatus)) {
endJobByEngineInstance(status._1)
} else {
- warn("Will update engineConnExecutor lastupdated time")
+ if (logger.isDebugEnabled()) {
+ debug(s"Will update engineConnExecutor(${status._1}) lastupdated time")
+ }
updateExecutorActivityTime(status._1, engineConnExecutorCache)
}
case o: Any =>
@@ -117,7 +126,7 @@
}
} {
case t: Throwable =>
- error(s"Failed to get status of engineConn : ${status._1.toString}, now end the job. ", t)
+ error(s"Failed to get status of engineConn : ${status._1}, now end the job. ", t)
endJobByEngineInstance(status._1)
}
}
diff --git a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/operation/progress/AbstractProgressOperation.scala b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/operation/progress/AbstractProgressOperation.scala
index 7d12500..1c826e5 100644
--- a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/operation/progress/AbstractProgressOperation.scala
+++ b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/operation/progress/AbstractProgressOperation.scala
@@ -73,7 +73,19 @@
}
override def onEventError(event: Event, t: Throwable): Unit = {
- info(s"Accept error event in progress operation, message: ${t.getMessage}")
+ var eventName: String = "Null Event"
+ var message: String = "NULL"
+ var cause: String = "NULL"
+ if (null != event) {
+ eventName = event.getClass.getName
+ }
+ if (null != t) {
+ message = t.getMessage
+ if (null != t.getCause) {
+ cause = t.getCause.getMessage
+ }
+ }
+ debug(s"Accept error event ${eventName} in progress operation, message: ${message}, cause : ${cause}")
}
}
diff --git a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/physical/CodeLogicalUnitExecTask.scala b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/physical/CodeLogicalUnitExecTask.scala
index 4c57356..6a385e9 100644
--- a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/physical/CodeLogicalUnitExecTask.scala
+++ b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/physical/CodeLogicalUnitExecTask.scala
@@ -20,12 +20,14 @@
import java.util.concurrent.TimeUnit
import com.webank.wedatasphere.linkis.common.exception.{ErrorException, LinkisRetryException, WarnException}
+import com.webank.wedatasphere.linkis.common.log.LogUtils
import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
import com.webank.wedatasphere.linkis.governance.common.protocol.task.{RequestTask, RequestTaskExecute}
import com.webank.wedatasphere.linkis.governance.common.utils.GovernanceConstant
import com.webank.wedatasphere.linkis.manager.label.entity.Label
import com.webank.wedatasphere.linkis.orchestrator.computation.conf.ComputationOrchestratorConf
import com.webank.wedatasphere.linkis.orchestrator.computation.execute.{CodeExecTaskExecutor, CodeExecTaskExecutorManager}
+import com.webank.wedatasphere.linkis.orchestrator.ecm.conf.ECMPluginConf
import com.webank.wedatasphere.linkis.orchestrator.exception.{OrchestratorErrorCodeSummary, OrchestratorErrorException, OrchestratorRetryException}
import com.webank.wedatasphere.linkis.orchestrator.execution.AsyncTaskResponse.NotifyListener
import com.webank.wedatasphere.linkis.orchestrator.execution.impl.DefaultFailedTaskResponse
@@ -58,26 +60,33 @@
private val askDuration = Duration(ComputationOrchestratorConf.MAX_ASK_EXECUTOR_TIME.getValue.toLong, TimeUnit.MILLISECONDS)
private var codeLogicalUnit: CodeLogicalUnit = _
+ private var isCanceled = false
+
override def execute(): TaskResponse = {
info(s"Start to execute CodeLogicalUnitExecTask(${getIDInfo()}).")
var executor: Option[CodeExecTaskExecutor] = None
var retryException: LinkisRetryException = null
- executor = Utils.tryCatch(codeExecTaskExecutorManager.askExecutor(this, askDuration)) {
+ executor = Utils.tryCatch(codeExecTaskExecutorManager.askExecutor(this)) {
case retry: LinkisRetryException =>
retryException = retry
None
case e:ErrorException =>
- //job.getLogListener.foreach(_.onLogUpdate(job, LogUtils.generateERROR(e.getMessage)))
throw e
case error: Throwable =>
- //job.getLogListener.foreach(_.onLogUpdate(job, LogUtils.generateERROR(error.getMessage)))
throw error
}
- if (executor.isDefined) {
+ if (executor.isDefined && !isCanceled) {
val requestTask = toRequestTask
val codeExecutor = executor.get
- val response = codeExecutor.getEngineConnExecutor.execute(requestTask)
+ val response = Utils.tryCatch(codeExecutor.getEngineConnExecutor.execute(requestTask)){
+ t: Throwable =>
+ error(s"Failed to submit ${getIDInfo()} to ${codeExecutor.getEngineConnExecutor.getServiceInstance}", t)
+ codeExecTaskExecutorManager.getByExecTaskId(this.getId).foreach { codeEngineConnExecutor =>
+ codeExecTaskExecutorManager.markECFailed(this, codeEngineConnExecutor)
+ }
+ throw new LinkisRetryException(ECMPluginConf.ECM_ENGNE_CREATION_ERROR_CODE, t.getMessage)
+ }
response match {
case SubmitResponse(engineConnExecId) =>
//封装engineConnExecId信息
@@ -104,7 +113,7 @@
private def toRequestTask: RequestTask ={
val requestTask = new RequestTaskExecute
requestTask.setCode(getCodeLogicalUnit.toStringCode)
- getLabels.add(getCodeLogicalUnit.getLabel)
+ //getLabels.add(getCodeLogicalUnit.getLabel)
requestTask.setLabels(getLabels)
//Map
// if (null != getParams.getRuntimeParams.getDataSources ) {
@@ -127,6 +136,7 @@
// case _ => null
// })
requestTask.getProperties.putAll(getParams.getRuntimeParams.toMap)
+ requestTask.setSourceID(getIDInfo())
requestTask
}
@@ -183,6 +193,7 @@
Utils.tryAndWarn(codeExecTaskExecutorManager.unLockEngineConn(this, codeEngineConnExecutor))
}
}
+ isCanceled = true
}
def getCodeEngineConnExecutor: CodeExecTaskExecutor = {
@@ -206,11 +217,16 @@
codeExecTaskExecutorManager.getByExecTaskId(this.getId).foreach { codeEngineConnExecutor =>
if (isSucceed) {
- info(s"ExecTask(${getIDInfo()}) execute success executor be delete.")
+ debug(s"ExecTask(${getIDInfo()}) execute success executor be delete.")
Utils.tryAndWarn(codeExecTaskExecutorManager.delete(this, codeEngineConnExecutor))
} else {
- info(s"ExecTask(${getIDInfo()}) execute failed executor be unLock.")
- Utils.tryAndWarn(codeExecTaskExecutorManager.unLockEngineConn(this, codeEngineConnExecutor))
+ if (StringUtils.isBlank(codeEngineConnExecutor.getEngineConnTaskId)) {
+ error(s"${getIDInfo()} Failed to submit running, now to remove codeEngineConnExecutor, forceRelease")
+ codeExecTaskExecutorManager.markECFailed(this, codeEngineConnExecutor)
+ } else {
+ debug(s"ExecTask(${getIDInfo()}) execute failed executor be unLock.")
+ Utils.tryAndWarn(codeExecTaskExecutorManager.unLockEngineConn(this, codeEngineConnExecutor))
+ }
}
}
}
diff --git a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/service/ComputationTaskExecutionReceiver.scala b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/service/ComputationTaskExecutionReceiver.scala
index 3682032..9332349 100644
--- a/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/service/ComputationTaskExecutionReceiver.scala
+++ b/linkis-orchestrator/linkis-computation-orchestrator/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/computation/service/ComputationTaskExecutionReceiver.scala
@@ -17,6 +17,7 @@
package com.webank.wedatasphere.linkis.orchestrator.computation.service
+import com.webank.wedatasphere.linkis.common.log.LogUtils
import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
import com.webank.wedatasphere.linkis.governance.common.entity.ExecutionNodeStatus
import com.webank.wedatasphere.linkis.governance.common.protocol.task._
@@ -58,15 +59,17 @@
val failedTaskMap = synchronized {
taskToExecutorCache.filter(_._2.getEngineConnExecutor.getServiceInstance.equals(failedEngineServiceInstance))
}
- if (null != failedTaskMap && !failedTaskMap.isEmpty) {
+ if (null != failedTaskMap && failedTaskMap.nonEmpty) {
failedTaskMap.foreach{
case (taskId, executor) =>
val execTask = executor.getExecTask
Utils.tryAndError {
- info(s"Will kill task ${execTask.getId} because the engine quitted unexpectedly.")
- val logEvent = TaskLogEvent(execTask, s"Your job : ${taskId} was failed because the engine quitted unexpectedly.")
+ warn(s"Will kill task ${execTask.getIDInfo()} because the engine ${executor.getEngineConnExecutor.getServiceInstance.toString} quited unexpectedly.")
+ val errLog = LogUtils.generateERROR(s"Your job : ${execTask.getIDInfo()} was failed because the engine quitted unexpectedly(任务${execTask.getIDInfo()}失败," +
+ s"原因是引擎意外退出,可能是复杂任务导致引擎退出,如OOM).")
+ val logEvent = TaskLogEvent(execTask, errLog)
execTask.getPhysicalContext.pushLog(logEvent)
- val errorResponseEvent = TaskErrorResponseEvent(execTask, "Engine quitted unexpectedly.")
+ val errorResponseEvent = TaskErrorResponseEvent(execTask, "task failed,Engine quitted unexpectedly(任务运行失败原因是引擎意外退出,可能是复杂任务导致引擎退出,如OOM).")
execTask.getPhysicalContext.broadcastSyncEvent(errorResponseEvent)
val statusEvent = TaskStatusEvent(execTask, ExecutionNodeStatus.Failed)
execTask.getPhysicalContext.broadcastSyncEvent(statusEvent)
@@ -82,6 +85,7 @@
codeExecTaskExecutorManager.getByEngineConnAndTaskId(serviceInstance, taskLog.execId).foreach { codeExecutor =>
val event = TaskLogEvent(codeExecutor.getExecTask, taskLog.log)
codeExecutor.getExecTask.getPhysicalContext.pushLog(event)
+ codeExecutor.getEngineConnExecutor.updateLastUpdateTime()
//asyncListenerBus.post(event)
}
}
@@ -92,6 +96,7 @@
codeExecTaskExecutorManager.getByEngineConnAndTaskId(serviceInstance, taskProgress.execId).foreach{ codeExecutor =>
val event = TaskProgressEvent(codeExecutor.getExecTask, taskProgress.progress, taskProgress.progressInfo)
codeExecutor.getExecTask.getPhysicalContext.pushProgress(event)
+ codeExecutor.getEngineConnExecutor.updateLastUpdateTime()
}
}
@@ -102,6 +107,7 @@
val event = TaskStatusEvent(codeExecutor.getExecTask, taskStatus.status)
info(s"From engineConn receive status info:$taskStatus, now post to listenerBus event: $event")
codeExecutor.getExecTask.getPhysicalContext.broadcastSyncEvent(event)
+ codeExecutor.getEngineConnExecutor.updateLastUpdateTime()
}
}
@@ -112,6 +118,7 @@
val event = TaskResultSetSizeEvent(codeExecutor.getExecTask, taskResultSize.resultSize)
info(s"From engineConn receive resultSet size info$taskResultSize, now post to listenerBus event: $event")
codeExecutor.getExecTask.getPhysicalContext.broadcastSyncEvent(event)
+ codeExecutor.getEngineConnExecutor.updateLastUpdateTime()
}
}
@@ -122,6 +129,7 @@
val event = TaskResultSetEvent(codeExecutor.getExecTask, ResultSet(taskResultSet.output, taskResultSet.alias))
info(s"From engineConn receive resultSet info $taskResultSet , now post to listenerBus event: $event")
codeExecutor.getExecTask.getPhysicalContext.broadcastSyncEvent(event)
+ codeExecutor.getEngineConnExecutor.updateLastUpdateTime()
}
}
@@ -136,6 +144,7 @@
val event = TaskErrorResponseEvent(codeExecutor.getExecTask, responseTaskError.errorMsg)
info(s"From engineConn receive responseTaskError info$responseTaskError, now post to listenerBus event: $event")
codeExecutor.getExecTask.getPhysicalContext.broadcastSyncEvent(event)
+ codeExecutor.getEngineConnExecutor.updateLastUpdateTime()
}
}
diff --git a/linkis-orchestrator/linkis-orchestrator-core/pom.xml b/linkis-orchestrator/linkis-orchestrator-core/pom.xml
index 8b490ad..436eea9 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/pom.xml
+++ b/linkis-orchestrator/linkis-orchestrator-core/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis-orchestrator</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/conf/OrchestratorConfiguration.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/conf/OrchestratorConfiguration.scala
index 3905a38..09ce4c9 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/conf/OrchestratorConfiguration.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/conf/OrchestratorConfiguration.scala
@@ -44,13 +44,14 @@
val EXECUTION_TASK_MAX_PARALLELISM = CommonVars("wds.linkis.orchestrator.execution.task.max.parallelism", 5)
- val TASK_RUNNER_MAX_SIZE = CommonVars("wds.linkis.orchestrator.execution.task.runner.max.size", 30)
+ val TASK_RUNNER_MAX_SIZE = CommonVars("wds.linkis.orchestrator.execution.task.runner.max.size", 200)
val EXEC_RUNNER_FACTORY_CLASS = CommonVars("wds.linkis.orchestrator.exec.task.runner.factory.class", "")
val TASK_MAX_PERSIST_WAIT_TIME = CommonVars("wds.linkis.orchestrator.task.persist.wait.max", new TimeType("5m"))
+ val RETRY_TASK_WAIT_TIME = CommonVars("wds.linkis.orchestrator.task.retry.wait.time", 10000)
val TASK_SCHEDULER_THREAD_POOL = CommonVars("wds.linkis.orchestrator.task.scheduler.thread.pool", 200)
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/converter/AbstractConverter.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/converter/AbstractConverter.scala
index f61950e..fc33bca 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/converter/AbstractConverter.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/converter/AbstractConverter.scala
@@ -36,7 +36,7 @@
override def convert(jobReq: JobReq): Job = {
val context = createASTContext(jobReq)
converterCheckRulers.foreach(_(jobReq, context))
- info(s"Start to convert JobReq(${jobReq.getId}) to AstJob.")
+ debug(s"Start to convert JobReq(${jobReq.getId}) to AstJob.")
val job = converterTransforms.collectFirst { case transform => transform(jobReq, context) }
.getOrElse(throw new OrchestratorErrorException(OrchestratorErrorCodeSummary.CONVERTER_FOR_NOT_SUPPORT_ERROR_CODE, "Cannot convert jobReq " + jobReq))
info(s"Finished to convert JobReq(${jobReq.getId}) to AstJob(${job.getId}).")
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/execution/AbstractExecution.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/execution/AbstractExecution.scala
index b8b4be1..a589178 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/execution/AbstractExecution.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/execution/AbstractExecution.scala
@@ -33,7 +33,7 @@
val taskConsumer: TaskConsumer
//TODO 容器清理
- protected val execTaskToExecutionTasks = new util.HashMap[ExecTask, ExecutionTask]()
+ protected val execTaskToExecutionTasks = new util.concurrent.ConcurrentHashMap[ExecTask, ExecutionTask]()
def getAllExecutionTasks(): Array[ExecutionTask]
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/execution/impl/BaseTaskScheduler.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/execution/impl/BaseTaskScheduler.scala
index 5530fd0..92bd443 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/execution/impl/BaseTaskScheduler.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/execution/impl/BaseTaskScheduler.scala
@@ -61,7 +61,7 @@
// TODO Should support to add task to ready queue, since a complex scheduler is needed,
// such as: fair, priority... we should schedule them by using some algorithms.
// TODO Here, we should also remove the futures which is completed normally in taskFutureCache and taskIdTaskCache.
- info(s"launch task Runner ${task.task.getIDInfo()}")
+ debug(s"launch task Runner ${task.task.getIDInfo()}")
val future = executeService.submit(task)
if (! future.isDone) {
taskFutureCache.put(task.task.getId, future)
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/execution/impl/DefaultTaskManager.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/execution/impl/DefaultTaskManager.scala
index 2ea6e95..0a6e472 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/execution/impl/DefaultTaskManager.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/execution/impl/DefaultTaskManager.scala
@@ -16,22 +16,22 @@
package com.webank.wedatasphere.linkis.orchestrator.execution.impl
+import java.util
+import java.util.concurrent.CopyOnWriteArrayList
+
import com.webank.wedatasphere.linkis.common.listener.Event
import com.webank.wedatasphere.linkis.common.utils.Logging
import com.webank.wedatasphere.linkis.governance.common.entity.ExecutionNodeStatus
import com.webank.wedatasphere.linkis.orchestrator.conf.OrchestratorConfiguration
import com.webank.wedatasphere.linkis.orchestrator.execution._
+import com.webank.wedatasphere.linkis.orchestrator.listener.OrchestratorSyncEvent
import com.webank.wedatasphere.linkis.orchestrator.listener.execution.ExecutionTaskCompletedEvent
import com.webank.wedatasphere.linkis.orchestrator.listener.task._
-import com.webank.wedatasphere.linkis.orchestrator.listener.{OrchestratorListenerBusContext, OrchestratorSyncEvent, OrchestratorSyncListenerBus}
import com.webank.wedatasphere.linkis.orchestrator.plans.physical.ExecTask
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
-import java.util
-import java.util.concurrent
-import java.util.concurrent.CopyOnWriteArrayList
/**
*
@@ -50,7 +50,7 @@
* value: ExecutionTask in running
*/
private val execTaskToExecutionTask: mutable.Map[String, ExecutionTask] = new mutable.HashMap[String, ExecutionTask]()
-
+ private val execTaskToExecutionTaskWriteLock = new Array[Byte](0)
/**
* key: ExecutionTaskID
* value: Array ExecTaskRunner in running
@@ -70,6 +70,7 @@
private val userRunningNumber: UserRunningNumber = new UserRunningNumber
+
/**
* create ExecutionTask
*
@@ -80,8 +81,10 @@
if (null != task) {
val executionTask = new BaseExecutionTask(OrchestratorConfiguration.EXECUTION_TASK_MAX_PARALLELISM.getValue, task)
executionTasks.add(executionTask)
- execTaskToExecutionTask.put(task.getId, executionTask)
- info(s"submit execTask ${task.getIDInfo()} to taskManager get executionTask ${executionTask.getId}")
+ execTaskToExecutionTaskWriteLock synchronized {
+ execTaskToExecutionTask.put(task.getId, executionTask)
+ }
+ logger.info(s"submit execTask ${task.getIDInfo()} to taskManager get executionTask ${executionTask.getId}")
task.getPhysicalContext.broadcastAsyncEvent(TaskConsumerEvent(task))
return executionTask
}
@@ -111,9 +114,9 @@
def getRunnableExecutionTasks: Array[ExecutionTask] = getSuitableExecutionTasks.filter { executionTask =>
val execTask = executionTask.getRootExecTask
- val subTasks = new java.util.HashSet[ExecTask]()
+ val subTasks = new mutable.HashSet[ExecTask]()
getSubTasksRecursively(executionTask, execTask, subTasks)
- !subTasks.isEmpty
+ subTasks.nonEmpty
}
protected def getSuitableExecutionTasks: Array[ExecutionTask] = {
@@ -137,9 +140,9 @@
//1. Get all runnable TaskRunner
runningExecutionTasks.foreach { executionTask =>
val execTask = executionTask.getRootExecTask
- val subTasks = new java.util.HashSet[ExecTask]()
- getSubTasksRecursively(executionTask, execTask, subTasks)
- val subExecTaskRunners = subTasks.asScala.map(execTaskToTaskRunner)
+ val runnableSubTasks = new mutable.HashSet[ExecTask]()
+ getSubTasksRecursively(executionTask, execTask, runnableSubTasks)
+ val subExecTaskRunners = runnableSubTasks.map(execTaskToTaskRunner)
execTaskRunners ++= subExecTaskRunners
}
@@ -147,7 +150,7 @@
val nowRunningNumber = executionTaskToRunningExecTask.values.map(_.length).sum
val maxRunning = if (nowRunningNumber >= MAX_RUNNER_TASK_SIZE) 0 else MAX_RUNNER_TASK_SIZE - nowRunningNumber
val runnableTasks = if (maxRunning == 0) {
- warn(s"The current running has exceeded the maximum, now: $nowRunningNumber ")
+ logger.warn(s"The current running has exceeded the maximum, now: $nowRunningNumber ")
Array.empty[ExecTaskRunner]
} else if (execTaskRunners.isEmpty) {
debug("There are no tasks to run now")
@@ -163,9 +166,23 @@
val executionTask = execTaskToExecutionTask.get(execTask.getPhysicalContext.getRootTask.getId)
if (executionTask.isDefined) {
val executionTaskId = executionTask.get.getId
- val runningExecTaskRunner = executionTaskToRunningExecTask.getOrElseUpdate(executionTaskId, new ArrayBuffer[ExecTaskRunner]())
- runningExecTaskRunner += userTaskRunner.taskRunner
- runners += userTaskRunner.taskRunner
+ executionTaskToRunningExecTask synchronized {
+ val runningExecTaskRunner = if (!executionTaskToRunningExecTask.contains(executionTaskId)) {
+ val taskRunnerBuffer = new ArrayBuffer[ExecTaskRunner]()
+ executionTaskToRunningExecTask.put(executionTaskId, taskRunnerBuffer)
+ val astContext = execTask.getTaskDesc.getOrigin.getASTOrchestration.getASTContext
+ // A new ExecutionTask has started running: increment the user's running-task count
+ val oldNumber = userRunningNumber.addNumber(astContext.getExecuteUser, astContext.getLabels)
+ logger.info(s"user key ${userRunningNumber.getKey(astContext.getLabels, astContext.getExecuteUser)}, " +
+ s"executionTaskId $executionTaskId to addNumber: ${oldNumber + 1 }")
+ taskRunnerBuffer
+ } else {
+ executionTaskToRunningExecTask(executionTaskId)
+ }
+ runningExecTaskRunner.append(userTaskRunner.taskRunner)
+ runners += userTaskRunner.taskRunner
+ }
+
}
}
runners.toArray
@@ -183,24 +200,41 @@
* @param task
*/
override def addCompletedTask(task: ExecTaskRunner): Unit = {
+ logger.info(s"${task.task.getIDInfo()} task completed, now remove from taskManager")
val rootTask = task.task.getPhysicalContext.getRootTask
+ val astContext = rootTask.getTaskDesc.getOrigin.getASTOrchestration.getASTContext
execTaskToExecutionTask.get(rootTask.getId).foreach { executionTask =>
- executionTaskToRunningExecTask synchronized {
- val oldRunningTasks = executionTaskToRunningExecTask.getOrElse(executionTask.getId, null)
- //from running ExecTasks to remove completed execTasks
- if (null != oldRunningTasks) {
- val runningRunners = oldRunningTasks.filterNot(runner => task.task.getId.equals(runner.task.getId))
- executionTaskToRunningExecTask.put(executionTask.getId, runningRunners)
- }
- }
//put completed execTasks to completed collections
executionTaskToCompletedExecTask synchronized {
val completedRunners = executionTaskToCompletedExecTask.getOrElseUpdate(executionTask.getId, new ArrayBuffer[ExecTaskRunner]())
if (!completedRunners.exists(_.task.getId.equals(task.task.getId))) {
completedRunners += task
- userRunningNumber.minusNumber(task.task.getTaskDesc.getOrigin.getASTOrchestration.getASTContext.getExecuteUser)
+ } else {
+ logger.error(s"Task${task.task.getIDInfo()} has completed, but has in completed")
}
}
+
+ executionTaskToRunningExecTask synchronized {
+ val oldRunningTasks = executionTaskToRunningExecTask.getOrElse(executionTask.getId, null)
+ // Remove the completed execTask from the running ExecTask collection
+ var shouldMinusTaskNumber = false
+ if (null != oldRunningTasks) {
+ val runningRunners = oldRunningTasks.filterNot(runner => task.task.getId.equals(runner.task.getId))
+ if (runningRunners.isEmpty) {
+ executionTaskToRunningExecTask.remove(executionTask.getId)
+ shouldMinusTaskNumber = true
+ } else {
+ executionTaskToRunningExecTask.put(executionTask.getId, runningRunners)
+ }
+ } else {
+ shouldMinusTaskNumber = true
+ }
+ if (shouldMinusTaskNumber) {
+ val oldNumber = userRunningNumber.minusNumber(astContext.getExecuteUser, astContext.getLabels)
+ logger.info(s"executionTask(${executionTask.getId}) no task running, user key ${userRunningNumber.getKey(astContext.getLabels, astContext.getExecuteUser)}, minusNumber: ${oldNumber - 1 }")
+ }
+ }
+
}
rootTask.getPhysicalContext.broadcastAsyncEvent(TaskConsumerEvent(task.task))
}
@@ -227,11 +261,13 @@
* @param execTask
* @param subTasks
*/
- private def getSubTasksRecursively(executionTask: ExecutionTask, execTask: ExecTask, subTasks: java.util.Set[ExecTask]): Unit = {
+ private def getSubTasksRecursively(executionTask: ExecutionTask, execTask: ExecTask, subTasks: mutable.Set[ExecTask]): Unit = {
if (subTasks.size > executionTask.getMaxParallelism || isExecuted(executionTask, execTask)) return
val tasks = findUnCompletedExecTasks(executionTask.getId, execTask.getChildren)
if (null == tasks || tasks.isEmpty) {
- subTasks.add(execTask)
+ if (execTask.canExecute) {
+ subTasks.add(execTask)
+ }
} else {
//递归子节点
tasks.foreach(getSubTasksRecursively(executionTask, _, subTasks))
@@ -277,17 +313,31 @@
private def clearExecutionTask(executionTask: ExecutionTask): Unit = {
+
+ val task = executionTask.getRootExecTask
+ val astContext = task.getTaskDesc.getOrigin.getASTOrchestration.getASTContext
+ logger.info(s"executionTask(${executionTask.getId}) finished user key ${userRunningNumber.getKey(astContext.getLabels, astContext.getExecuteUser)}")
// from executionTask to remove executionTask
executionTasks.remove(executionTask)
// from execTaskToExecutionTask to remove root execTask
- execTaskToExecutionTask.remove(executionTask.getRootExecTask.getId)
+ execTaskToExecutionTaskWriteLock synchronized {
+ execTaskToExecutionTask.remove(task.getId)
+ }
// from executionTaskToCompletedExecTask to remove executionTask
- executionTaskToCompletedExecTask.remove(executionTask.getId)
- executionTaskToRunningExecTask.remove(executionTask.getId).foreach(_.foreach(execTaskRunner => execTaskRunner.interrupt()))
+ executionTaskToCompletedExecTask synchronized {
+ executionTaskToCompletedExecTask.remove(executionTask.getId)
+ }
+ val maybeRunners = executionTaskToRunningExecTask synchronized {
+ executionTaskToRunningExecTask.remove(executionTask.getId)
+ }
+ if (maybeRunners.isDefined) {
+ val oldNumber = userRunningNumber.minusNumber(astContext.getExecuteUser, astContext.getLabels)
+ logger.info(s"executionTask(${executionTask.getId}) finished user key ${userRunningNumber.getKey(astContext.getLabels, astContext.getExecuteUser)}, minusNumber: ${oldNumber - 1 }")
+ }
}
override def onRootTaskResponseEvent(rootTaskResponseEvent: RootTaskResponseEvent): Unit = {
- info(s"received rootTaskResponseEvent ${rootTaskResponseEvent.execTask.getIDInfo()}")
+ logger.info(s"received rootTaskResponseEvent ${rootTaskResponseEvent.execTask.getIDInfo()}")
val rootTask = rootTaskResponseEvent.execTask
val maybeTask = execTaskToExecutionTask.get(rootTask.getId)
if (maybeTask.isDefined) {
@@ -304,10 +354,10 @@
override protected def markExecutionTaskCompleted(executionTask: ExecutionTask, taskResponse: CompletedTaskResponse): Unit = {
- info(s"Start to mark executionTask(${executionTask.getId}) to Completed.")
+ debug(s"Start to mark executionTask(${executionTask.getId}) rootExecTask ${executionTask.getRootExecTask.getIDInfo()} to Completed.")
clearExecutionTask(executionTask)
executionTask.getRootExecTask.getPhysicalContext.broadcastSyncEvent(ExecutionTaskCompletedEvent(executionTask.getId, taskResponse))
- info(s"Finished to mark executionTask(${executionTask.getId}) to Completed.")
+ logger.info(s"Finished to mark executionTask(${executionTask.getId}) rootExecTask ${executionTask.getRootExecTask.getIDInfo()} to Completed.")
}
override def onEventError(event: Event, t: Throwable): Unit = {}
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/execution/impl/NotifyTaskConsumer.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/execution/impl/NotifyTaskConsumer.scala
index 1c21259..2cc4a38 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/execution/impl/NotifyTaskConsumer.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/execution/impl/NotifyTaskConsumer.scala
@@ -38,18 +38,19 @@
protected def afterLaunchTask(runnableTasks: Array[ExecTaskRunner]): Unit = {}
- override def run(): Unit = Utils.tryAndErrorMsg{
- while(!isStopped) {
- beforeFetchLaunchTask()
- val runnableTasks = getExecution.taskManager.getRunnableTasks
- beforeLaunchTask(runnableTasks)
- runnableTasks.foreach(getExecution.taskScheduler.launchTask)
- afterLaunchTask(runnableTasks)
- notifyLock synchronized {
- notifyLock.wait(getWaitTime)
- }
- }
- }("Consumer exit")
+ override def run(): Unit = {
+ while (!isStopped)
+ Utils.tryAndErrorMsg {
+ beforeFetchLaunchTask()
+ val runnableTasks = getExecution.taskManager.getRunnableTasks
+ beforeLaunchTask(runnableTasks)
+ runnableTasks.foreach(getExecution.taskScheduler.launchTask)
+ afterLaunchTask(runnableTasks)
+ notifyLock synchronized {
+ notifyLock.wait(getWaitTime)
+ }
+ }("Consumer error")
+ }
override def onEvent(event: OrchestratorAsyncEvent): Unit = event match {
case taskConsumerEvent: TaskConsumerEvent =>
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/execution/impl/UserRunningNumber.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/execution/impl/UserRunningNumber.scala
index d8a44f8..d422c65 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/execution/impl/UserRunningNumber.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/execution/impl/UserRunningNumber.scala
@@ -16,41 +16,73 @@
package com.webank.wedatasphere.linkis.orchestrator.execution.impl
-import scala.collection.mutable
+import java.util
+
+import com.webank.wedatasphere.linkis.manager.label.entity.Label
+import com.webank.wedatasphere.linkis.manager.label.entity.engine.{EngineTypeLabel, UserCreatorLabel}
+import scala.collection.JavaConverters._
class UserRunningNumber {
- private val runningNumber: mutable.Map[String, Int] = new mutable.HashMap[String, Int]()
+ private val runningNumber: java.util.Map[String, Int] = new java.util.HashMap[String, Int]()
- def this(sourceRunningNumber: mutable.Map[String, Int]) = {
+ private val SPLIT = ","
+
+ def this(sourceRunningNumber: java.util.Map[String, Int]) = {
this()
- this.runningNumber ++= sourceRunningNumber
+ this.runningNumber.putAll(sourceRunningNumber)
}
- def addNumber(user: String, number: Int = 1): Int = synchronized {
- val oldNumber = runningNumber.getOrElse(user, 0)
- runningNumber.put(user, oldNumber + number)
+ def addNumber(user: String, labels: util.List[Label[_]], number: Int = 1): Int = synchronized {
+ val key = getKey(labels, user)
+ val oldNumber = runningNumber.getOrDefault(key, 0)
+ runningNumber.put(key, oldNumber + number)
oldNumber
}
- def minusNumber(user: String, number: Int = 1): Int = synchronized {
- val oldNumber = runningNumber.getOrElse(user, 0)
+ def minusNumber(user: String, labels: util.List[Label[_]], number: Int = 1): Int = synchronized {
+ val key = getKey(labels, user)
+ val oldNumber = runningNumber.getOrDefault(key, 0)
val running = oldNumber - number
if (running > 0) {
- runningNumber.put(user, running)
+ runningNumber.put(key, running)
} else {
- runningNumber.remove(user)
+ runningNumber.remove(key)
}
oldNumber
}
- def getRunningNumber(user: String): Int = {
- runningNumber.getOrElse(user, 0)
+ def getRunningNumber(user: String, labels: util.List[Label[_]]): Int = {
+ val key = getKey(labels, user)
+ runningNumber.getOrDefault(key, 0)
}
- def copy(): UserRunningNumber = {
+ /**
+ * Copy the current running task situation
+ * @return
+ */
+ def copy(): UserRunningNumber = synchronized {
val newUserRunningNumber = new UserRunningNumber(runningNumber)
newUserRunningNumber
}
+ /**
+ * TODO Unify the getKey method (统一getKey方法)
+ * @param labels
+ * @return
+ */
+ def getKey(labels: util.List[Label[_]], user: String): String = {
+ var userCreatorLabel: UserCreatorLabel = null
+ var engineTypeLabel: EngineTypeLabel = null
+ labels.asScala.foreach {
+ case label: UserCreatorLabel => userCreatorLabel = label
+ case label: EngineTypeLabel => engineTypeLabel = label
+ case _ =>
+ }
+ if (null != userCreatorLabel && null != engineTypeLabel) {
+ userCreatorLabel.getStringValue + SPLIT + engineTypeLabel.getStringValue
+ } else {
+ user
+ }
+ }
}
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/execution/impl/UserTaskRunnerQueue.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/execution/impl/UserTaskRunnerQueue.scala
index c17a564..27378f4 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/execution/impl/UserTaskRunnerQueue.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/execution/impl/UserTaskRunnerQueue.scala
@@ -65,7 +65,7 @@
val maxRunningNumber = if (userParallelOrchestratorPlugin.isDefined) userParallelOrchestratorPlugin.get.getUserMaxRunningJobs(user, labels) else {
DEFAULT_MAX_RUNNING
}
- val runningNumber = userRunningNumber.getRunningNumber(user)
+ val runningNumber = userRunningNumber.getRunningNumber(user, labels)
priorityQueue += UserTaskRunner(user, maxRunningNumber, runningNumber, taskRunner)
}
@@ -77,7 +77,7 @@
val maxRunningNumber = if (userParallelOrchestratorPlugin.isDefined) userParallelOrchestratorPlugin.get.getUserMaxRunningJobs(user, labels) else {
DEFAULT_MAX_RUNNING
}
- UserTaskRunner(user, maxRunningNumber, userRunningNumber.addNumber(user), taskRunner)
+ UserTaskRunner(user, maxRunningNumber, userRunningNumber.addNumber(user, labels), taskRunner)
}.filter(userTaskRunner => userTaskRunner.maxRunningNumber > userTaskRunner.runningNumber)
priorityQueue ++= runners
}
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/extensions/catalyst/TransformFactory.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/extensions/catalyst/TransformFactory.scala
index 7cd49c4..a5fc3a1 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/extensions/catalyst/TransformFactory.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/extensions/catalyst/TransformFactory.scala
@@ -57,7 +57,7 @@
var lastLoopTreeNode: In = from
var count = 0
while (true) {
- info(s"Try to analyze ${from.getName} count($count).")
+ debug(s"Try to analyze ${from.getName} count($count).")
count += 1
val newTreeNode = transforms.foldLeft(lastLoopTreeNode) { (treeNode, transform) => transform.apply(treeNode, context) }
if (newTreeNode.theSame(lastLoopTreeNode)) return newTreeNode
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/listener/task/TaskInfoEvent.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/listener/task/TaskInfoEvent.scala
index 9e779be..26d9a7d 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/listener/task/TaskInfoEvent.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/listener/task/TaskInfoEvent.scala
@@ -31,13 +31,29 @@
val execTask: ExecTask
}
-case class TaskStatusEvent(execTask: ExecTask, status: ExecutionNodeStatus) extends TaskInfoEvent with OrchestratorSyncEvent with OrchestratorAsyncEvent
+case class TaskStatusEvent(execTask: ExecTask, status: ExecutionNodeStatus) extends TaskInfoEvent with OrchestratorSyncEvent with OrchestratorAsyncEvent {
+ override def toString: String = {
+ s"task ${execTask.getIDInfo()}, status ${status}"
+ }
+}
-case class TaskResultSetEvent(execTask: ExecTask, resultSet: ResultSet) extends TaskInfoEvent with OrchestratorSyncEvent
+case class TaskResultSetEvent(execTask: ExecTask, resultSet: ResultSet) extends TaskInfoEvent with OrchestratorSyncEvent {
+ override def toString: String = {
+ s"task ${execTask.getIDInfo()}"
+ }
+}
-case class TaskResultSetSizeEvent(execTask: ExecTask, resultSize: Int) extends TaskInfoEvent with OrchestratorSyncEvent
+case class TaskResultSetSizeEvent(execTask: ExecTask, resultSize: Int) extends TaskInfoEvent with OrchestratorSyncEvent {
+ override def toString: String = {
+ s"task ${execTask.getIDInfo()}, size ${resultSize}"
+ }
+}
-case class TaskErrorResponseEvent(execTask: ExecTask, errorMsg: String) extends TaskInfoEvent with OrchestratorSyncEvent
+case class TaskErrorResponseEvent(execTask: ExecTask, errorMsg: String) extends TaskInfoEvent with OrchestratorSyncEvent {
+ override def toString: String = {
+ s"task ${execTask.getIDInfo()}, errorMsg ${errorMsg}"
+ }
+}
//case class TaskProgressEvent(execId: ExecTask, progress: Float, progressInfo: Array[JobProgressInfo]) extends TaskInfoEvent
@@ -50,6 +66,10 @@
case class TaskReheaterEvent(execTask: ExecTask) extends TaskInfoEvent with OrchestratorAsyncEvent
-case class TaskProgressEvent(execTask: ExecTask, progress: Float, progressInfo: Array[JobProgressInfo]) extends TaskInfoEvent with OrchestratorAsyncEvent
+case class TaskProgressEvent(execTask: ExecTask, progress: Float, progressInfo: Array[JobProgressInfo]) extends TaskInfoEvent with OrchestratorAsyncEvent {
+ override def toString: String = {
+ s"task ${execTask.getIDInfo()}, progress ${progress}"
+ }
+}
case class TaskConsumerEvent(execTask: ExecTask) extends TaskInfoEvent with OrchestratorAsyncEvent
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/optimizer/AbstractOptimizer.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/optimizer/AbstractOptimizer.scala
index 2eab61c..73811bf 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/optimizer/AbstractOptimizer.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/optimizer/AbstractOptimizer.scala
@@ -35,13 +35,13 @@
override def optimize(task: Task): ExecTask = {
val context = createLogicalContext(task)
//优化
- info(s"Start to optimize LogicalTree(${task.getId}).")
+ debug(s"Start to optimize LogicalTree(${task.getId}).")
val optimizedTask = apply(task, context, optimizerTransforms.map {
transform: Transform[Task, Task, LogicalContext] => transform
})
- info(s"Finished to optimize LogicalTree(${task.getId}).")
+ debug(s"Finished to optimize LogicalTree(${task.getId}).")
//物化
- info(s"Start to transform LogicalTree(${task.getId}) to PhysicalTree.")
+ debug(s"Start to transform LogicalTree(${task.getId}) to PhysicalTree.")
val execTask = apply(optimizedTask, context, new mutable.HashMap[Task, ExecTask], physicalTransforms.map{
transform: Transform[Task, ExecTask, LogicalContext] => transform
})
@@ -49,7 +49,7 @@
findLeafNode(execTask, leafNodes)
val physicalContext = createPhysicalContext(execTask, leafNodes.toArray)
initTreePhysicalContext(execTask, physicalContext)
- info(s"Finished to transform LogicalTree(${task.getId}) to PhysicalTree.")
+ debug(s"Finished to transform LogicalTree(${task.getId}) to PhysicalTree.")
execTask
}
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/parser/AbstractParser.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/parser/AbstractParser.scala
index 359b2ba..aad474b 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/parser/AbstractParser.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/parser/AbstractParser.scala
@@ -28,14 +28,14 @@
override def parse(astPlan: ASTOrchestration[_]): ASTOrchestration[_] = {
//parse
Option(parserTransforms).map { transforms =>
- info(s"Start to parse AstJob(${astPlan.getId}) to AstTree.")
+ debug(s"Start to parse AstJob(${astPlan.getId}) to AstTree.")
val newAstPlan = astPlan match {
case job: Job =>
apply(job, astPlan.getASTContext, transforms.map{
transform: Transform[Job, Job, ASTContext] => transform
})
}
- info(s"Finished to parse AstJob(${astPlan.getId}) to AstTree.")
+ debug(s"Finished to parse AstJob(${astPlan.getId}) to AstTree.")
newAstPlan
}.getOrElse{
info(s"AstJob(${astPlan.getId}) do not need to parse, ignore it.")
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/planner/AbstractPlanner.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/planner/AbstractPlanner.scala
index e717a74..9c7ee6a 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/planner/AbstractPlanner.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/planner/AbstractPlanner.scala
@@ -32,7 +32,7 @@
override def plan(astPlan: ASTOrchestration[_]): Task = astPlan match {
case job: Job =>
- info(s"start to plan AstTree(${astPlan.getId}) to LogicalTree.")
+ debug(s"start to plan AstTree(${astPlan.getId}) to LogicalTree.")
var task = apply(job, astPlan.getASTContext, new mutable.HashMap[Job, Task](), plannerTransforms.map{
transform: Transform[Job, Task, ASTContext] => transform
})
@@ -42,7 +42,7 @@
task = apply(task, context, analyzeTransforms.map{
transform: Transform[Task, Task, LogicalContext] => transform
})
- info(s"Finished to plan AstTree(${astPlan.getId}) to LogicalTree.")
+ debug(s"Finished to plan AstTree(${astPlan.getId}) to LogicalTree.")
task
}
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/plans/physical/ExecTask.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/plans/physical/ExecTask.scala
index ac6df5f..b5ab21f 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/plans/physical/ExecTask.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/plans/physical/ExecTask.scala
@@ -49,7 +49,7 @@
stage.getJob.getId
case _ => ""
}
- jobID + getId
+ jobID + "_" + getId
}
}
\ No newline at end of file
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/plans/physical/PhysicalContextImpl.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/plans/physical/PhysicalContextImpl.scala
index 79b2eba..a854e68 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/plans/physical/PhysicalContextImpl.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/plans/physical/PhysicalContextImpl.scala
@@ -18,6 +18,7 @@
import java.util
import com.webank.wedatasphere.linkis.common.listener.Event
+import com.webank.wedatasphere.linkis.common.log.LogUtils
import com.webank.wedatasphere.linkis.governance.common.entity.ExecutionNodeStatus
import com.webank.wedatasphere.linkis.orchestrator.exception.OrchestratorErrorCodeSummary
import com.webank.wedatasphere.linkis.orchestrator.execution.impl.DefaultFailedTaskResponse
@@ -55,8 +56,10 @@
override def markFailed(errorMsg: String, cause: Throwable): Unit = {
this.executionNodeStatus = ExecutionNodeStatus.Failed
- val failedResponse = new DefaultFailedTaskResponse(errorMsg,OrchestratorErrorCodeSummary.EXECUTION_ERROR_CODE, cause)
+ val failedResponse = new DefaultFailedTaskResponse(errorMsg, OrchestratorErrorCodeSummary.EXECUTION_ERROR_CODE, cause)
this.response = failedResponse
+ val event = TaskLogEvent(getRootTask, LogUtils.generateERROR(errorMsg))
+ pushLog(event)
syncListenerBus.postToAll(RootTaskResponseEvent(getRootTask, failedResponse))
}
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/plans/physical/RetryExecTask.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/plans/physical/RetryExecTask.scala
index 8f30a44..b0ab702 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/plans/physical/RetryExecTask.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/plans/physical/RetryExecTask.scala
@@ -13,8 +13,10 @@
package com.webank.wedatasphere.linkis.orchestrator.plans.physical
+import com.webank.wedatasphere.linkis.orchestrator.conf.OrchestratorConfiguration
import com.webank.wedatasphere.linkis.orchestrator.exception.{OrchestratorErrorCodeSummary, OrchestratorErrorException}
import com.webank.wedatasphere.linkis.orchestrator.execution.TaskResponse
+import com.webank.wedatasphere.linkis.orchestrator.listener.task.TaskInfoEvent
import com.webank.wedatasphere.linkis.orchestrator.plans.logical.TaskDesc
import com.webank.wedatasphere.linkis.orchestrator.strategy.{ResultSetExecTask, StatusInfoExecTask}
import com.webank.wedatasphere.linkis.orchestrator.strategy.async.AsyncExecTask
@@ -30,6 +32,8 @@
private var physicalContext: PhysicalContext = _
+ private val createTime = System.currentTimeMillis()
+
def getOriginTask: ExecTask = {
originTask
}
@@ -39,10 +43,14 @@
}
override def canExecute: Boolean = {
+ val takenTime = System.currentTimeMillis() - createTime
if(originTask != null) {
- originTask.canExecute
+ originTask.canExecute && takenTime > OrchestratorConfiguration.RETRY_TASK_WAIT_TIME.getValue
}
- else false
+ else {
+ throw new OrchestratorErrorException(OrchestratorErrorCodeSummary.EXECUTION_ERROR_CODE,
+ s"${getIDInfo()} originTask task cannot be null" )
+ }
}
override def execute(): TaskResponse = {
@@ -50,7 +58,7 @@
originTask.execute()
}else {
throw new OrchestratorErrorException(OrchestratorErrorCodeSummary.EXECUTION_ERROR_CODE,
- "task cannot be execute, task will be retried maybe not exist" +
+ s"${getIDInfo()} task cannot be execute, task will be retried maybe not exist" +
"(任务不允许被执行,被重热的任务可能不存在)")
}
}
@@ -109,4 +117,12 @@
case _ =>
}
}
+
+ override def canDealEvent(event: TaskInfoEvent): Boolean = {
+ if (null != getOriginTask ) {
+ getOriginTask.getId.equals(event.execTask.getId)
+ } else {
+ false
+ }
+ }
}
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/reheater/AbstractReheater.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/reheater/AbstractReheater.scala
index 83bde5f..078a80a 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/reheater/AbstractReheater.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/reheater/AbstractReheater.scala
@@ -28,7 +28,7 @@
override def reheat(execTask: ExecTask): Unit = execTask match {
case reheat: ReheatableExecTask =>
- info(s"Try to reheat ${execTask.getIDInfo()}.")
+ debug(s"Try to reheat ${execTask.getIDInfo()}.")
reheat.setReheating()
var changed = false
Utils.tryCatch(Option(reheaterTransforms).foreach { transforms =>
@@ -49,7 +49,9 @@
execTask.getPhysicalContext.markFailed(s"Reheat ${execTask.getIDInfo()} failed, now mark it failed!", t)
}
reheat.setReheated()
- info(s"${execTask.getIDInfo()} reheated. The physicalTree has been ${if(changed) s"changed. The new tree is ${execTask.simpleString}." else "not changed."}")
+ if(changed) {
+ info(s"${execTask.getIDInfo()} reheated. The physicalTree has been changed. The new tree is ${execTask.simpleString}." )
+ }
case _ =>
}
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/strategy/GatherStrategyJobExecTask.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/strategy/GatherStrategyJobExecTask.scala
index e1c2546..b1c5514 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/strategy/GatherStrategyJobExecTask.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/strategy/GatherStrategyJobExecTask.scala
@@ -18,7 +18,8 @@
import com.webank.wedatasphere.linkis.common.utils.Logging
import com.webank.wedatasphere.linkis.orchestrator.core.ResultSet
-import com.webank.wedatasphere.linkis.orchestrator.execution.impl.DefaultResultSetTaskResponse
+import com.webank.wedatasphere.linkis.orchestrator.exception.OrchestratorErrorCodeSummary
+import com.webank.wedatasphere.linkis.orchestrator.execution.impl.{DefaultFailedTaskResponse, DefaultResultSetTaskResponse}
import com.webank.wedatasphere.linkis.orchestrator.execution.{CompletedTaskResponse, SucceedTaskResponse, TaskResponse}
import com.webank.wedatasphere.linkis.orchestrator.plans.logical.{EndJobTaskDesc, StartJobTaskDesc}
import com.webank.wedatasphere.linkis.orchestrator.plans.physical.{ExecTask, JobExecTask, ReheatableExecTask}
@@ -26,27 +27,35 @@
import scala.collection.mutable.ArrayBuffer
/**
- *
- *
- */
+ *
+ *
+ */
class GatherStrategyJobExecTask(parents: Array[ExecTask],
children: Array[ExecTask]) extends JobExecTask(parents, children)
- with ReheatableExecTask with ResultSetExecTask with Logging{
+ with ReheatableExecTask with ResultSetExecTask with StatusInfoExecTask with Logging {
/**
- * Job End Task
- * Aggregate the results of response(汇总结果响应)
- *
- * @return
- */
+ * Job End Task: aggregate the results of responses (汇总结果响应)
+ *
+ * @return
+ */
override def execute(): TaskResponse = getTaskDesc match {
case _: StartJobTaskDesc =>
super.execute()
case _: EndJobTaskDesc =>
if (getPhysicalContext.isCompleted) {
- info(s"Job end be skip")
- new CompletedTaskResponse() {}
+ val msg = s"PhysicalContext is completed, Job${getIDInfo()} will be mark Failed "
+ info(msg)
+ new DefaultFailedTaskResponse(msg, OrchestratorErrorCodeSummary.EXECUTION_FOR_EXECUTION_ERROR_CODE, null) {}
} else {
+
+ val errorExecTasks = getErrorChildrenExecTasks
+ if (errorExecTasks.isDefined) {
+ val errorReason = parseChildrenErrorInfo(errorExecTasks.get)
+ getPhysicalContext.markFailed(errorReason, null)
+ return new DefaultFailedTaskResponse(errorReason, OrchestratorErrorCodeSummary.STAGE_ERROR_CODE, null)
+ }
+
val execIdToResponse = getChildrenResultSet()
val response = if (null != execIdToResponse && execIdToResponse.nonEmpty) {
val resultSets = new ArrayBuffer[ResultSet]()
@@ -58,7 +67,7 @@
} else {
new SucceedTaskResponse() {}
}
- info(s"Job end execute finished, now to mark executionTask succeed")
+ debug(s"Job${getIDInfo()} end execute finished, now to mark executionTask succeed")
getPhysicalContext.markSucceed(response)
response
}
@@ -71,6 +80,6 @@
}
override def canExecute: Boolean = {
- ! getReheating
+ !getReheating
}
}
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/strategy/GatherStrategyStageInfoExecTask.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/strategy/GatherStrategyStageInfoExecTask.scala
index 9418206..17372a5 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/strategy/GatherStrategyStageInfoExecTask.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/strategy/GatherStrategyStageInfoExecTask.scala
@@ -45,27 +45,11 @@
* @return
*/
override def execute(): TaskResponse = {
- val statusInfos = getChildrenExecTaskStatusInfo()
- if (null != statusInfos) {
- val errorExecTask = statusInfos.filter { entry =>
- !ExecutionNodeStatus.isSucceed(entry._2.nodeStatus) && ExecutionNodeStatus.isCompleted(entry._2.nodeStatus)
- }
- if (null != errorExecTask && errorExecTask.nonEmpty) {
- val errorReason = errorExecTask.map { entry =>
- val execTaskId = entry._1
- val statusInfo = entry._2
- val errorMsg = statusInfo.taskResponse match {
- case failedTaskResponse: FailedTaskResponse =>
- s"Task is Failed,errorMsg: ${failedTaskResponse.getErrorMsg}"
- case _ => s"Task($execTaskId) status not succeed,is ${statusInfo.nodeStatus}"
- }
- errorMsg
- }.mkString(";")
- error(s"There are Tasks execution failure ($errorReason) of stage ${getIDInfo()}, now mark ExecutionTask as failed")
-
- getPhysicalContext.markFailed(errorReason, null)
- return new DefaultFailedTaskResponse(errorReason, OrchestratorErrorCodeSummary.STAGE_ERROR_CODE, null)
- }
+ val errorExecTasks = getErrorChildrenExecTasks
+ if (errorExecTasks.isDefined) {
+ val errorReason = parseChildrenErrorInfo(errorExecTasks.get)
+ getPhysicalContext.markFailed(errorReason, null)
+ return new DefaultFailedTaskResponse(errorReason, OrchestratorErrorCodeSummary.STAGE_ERROR_CODE, null)
}
val execIdToResponse = getChildrenResultSet()
if (null != execIdToResponse && execIdToResponse.nonEmpty) {
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/strategy/StatusInfoExecTask.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/strategy/StatusInfoExecTask.scala
index 8ba4270..8f8c3ed 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/strategy/StatusInfoExecTask.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/strategy/StatusInfoExecTask.scala
@@ -16,6 +16,11 @@
package com.webank.wedatasphere.linkis.orchestrator.strategy
+import com.webank.wedatasphere.linkis.common.utils.Logging
+import com.webank.wedatasphere.linkis.governance.common.entity.ExecutionNodeStatus
+import com.webank.wedatasphere.linkis.orchestrator.exception.OrchestratorErrorCodeSummary
+import com.webank.wedatasphere.linkis.orchestrator.execution.FailedTaskResponse
+import com.webank.wedatasphere.linkis.orchestrator.execution.impl.DefaultFailedTaskResponse
import com.webank.wedatasphere.linkis.orchestrator.plans.physical.ExecTask
import scala.collection.mutable
@@ -23,7 +28,7 @@
*
*
*/
-trait StatusInfoExecTask extends ExecTask {
+trait StatusInfoExecTask extends ExecTask with Logging{
/**
* This method is called by ExecTaskRunner, ExecTask cannot be called directly, which will cause repeated placement of status information
@@ -52,6 +57,38 @@
null
}
+
+ def parseChildrenErrorInfo(errorExecTasks: Map[String, ExecTaskStatusInfo]): String = {
+ if (null != errorExecTasks && errorExecTasks.nonEmpty) {
+ val errorReason = errorExecTasks.map { entry =>
+ val execTaskId = entry._1
+ val statusInfo = entry._2
+ val errorMsg = statusInfo.taskResponse match {
+ case failedTaskResponse: FailedTaskResponse =>
+ s"Task is Failed,errorMsg: ${failedTaskResponse.getErrorMsg}"
+ case _ => s"Task($execTaskId) status not succeed,is ${statusInfo.nodeStatus}"
+ }
+ errorMsg
+ }.mkString(";")
+ error(s"There are Tasks execution failure of stage ${getIDInfo()}, now mark ExecutionTask as failed")
+ return errorReason
+ }
+ "error info is null"
+ }
+
+ def getErrorChildrenExecTasks: Option[Map[String, ExecTaskStatusInfo]] = {
+ val statusInfos = getChildrenExecTaskStatusInfo()
+ if (null != statusInfos) {
+ val errorExecTask = statusInfos.filter { entry =>
+ !ExecutionNodeStatus.isSucceed(entry._2.nodeStatus) && ExecutionNodeStatus.isCompleted(entry._2.nodeStatus)
+ }
+ if (null != errorExecTask && errorExecTask.nonEmpty) {
+ return Some(errorExecTask)
+ }
+ }
+ None
+ }
+
}
object StatusInfoExecTask {
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/strategy/async/AsyncExecTask.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/strategy/async/AsyncExecTask.scala
index 5f8bece..a0830e2 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/strategy/async/AsyncExecTask.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/strategy/async/AsyncExecTask.scala
@@ -16,6 +16,7 @@
package com.webank.wedatasphere.linkis.orchestrator.strategy.async
+import com.webank.wedatasphere.linkis.orchestrator.listener.task.TaskInfoEvent
import com.webank.wedatasphere.linkis.orchestrator.plans.physical.ExecTask
/**
@@ -28,4 +29,13 @@
def clear(isSucceed: Boolean): Unit
+ /**
+ * Determine whether the event can be handled by the task
+ * @param event
+ * @return
+ */
+ def canDealEvent(event: TaskInfoEvent): Boolean = {
+ getId.equals(event.execTask.getId)
+ }
+
}
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/strategy/async/AsyncExecTaskRunnerImpl.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/strategy/async/AsyncExecTaskRunnerImpl.scala
index 6417f99..9b48efd 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/strategy/async/AsyncExecTaskRunnerImpl.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/strategy/async/AsyncExecTaskRunnerImpl.scala
@@ -82,7 +82,7 @@
override def transientStatus(status: ExecutionNodeStatus): Unit = {
if (status.ordinal() < this.status.ordinal() && status != ExecutionNodeStatus.WaitForRetry) {
- warn(s"Task status flip error! Cause: Failed to flip from ${this.status} to $status.")
+ info(s"Task${task.getIDInfo()} status flip error! Cause: Failed to flip from ${this.status} to $status.")
return
}
//throw new OrchestratorErrorException(OrchestratorErrorCodeSummary.EXECUTION_FOR_EXECUTION_ERROR_CODE, s"Task status flip error! Cause: Failed to flip from ${this.status} to $status.") //抛异常
@@ -139,7 +139,7 @@
val startWaitForPersistedTime = System.currentTimeMillis
resultSets synchronized {
while (( resultSize < 0 || resultSets.size < resultSize) && !isWaitForPersistedTimeout(startWaitForPersistedTime))
- resultSets.wait(3000)
+ resultSets.wait(1000)
}
// if (isWaitForPersistedTimeout(startWaitForPersistedTime)) onFailure("persist resultSets timeout!", new EntranceErrorException(20305, "persist resultSets timeout!"))
}
@@ -152,23 +152,23 @@
override def interrupt(): Unit = {
- this.status = ExecutionNodeStatus.Cancelled
+ markFailed("Job be cancelled", null)
task match {
case asyncExecTask: AsyncExecTask =>
asyncExecTask.kill()
case _ =>
}
- markFailed("Job be cancelled", null)
+ transientStatus(ExecutionNodeStatus.Cancelled)
}
override def setResultSize(resultSize: Int): Unit = {
- info(s"BaseExecTaskRunner get result size is $resultSize")
+ info(s"BaseExecTaskRunner ${task.getIDInfo()} get result size is $resultSize")
if (this.resultSize == -1) this.resultSize = resultSize
resultSets.notify()
}
override def addResultSet(resultSet: ResultSet): Unit = {
- info(s"BaseExecTaskRunner get result, now size is ${resultSets.size}")
+ info(s"BaseExecTaskRunner ${task.getIDInfo()} get result, now size is ${resultSets.size}")
resultSets += resultSet
resultSets.notify()
}
diff --git a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/strategy/async/AsyncTaskManager.scala b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/strategy/async/AsyncTaskManager.scala
index aadd20c..326cc2b 100644
--- a/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/strategy/async/AsyncTaskManager.scala
+++ b/linkis-orchestrator/linkis-orchestrator-core/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/strategy/async/AsyncTaskManager.scala
@@ -49,11 +49,8 @@
}
override def onResultSetCreate(taskResultSetEvent: TaskResultSetEvent): Unit = {
- info(s"received taskResultSetEvent ${taskResultSetEvent.execTask.getId}")
- val execTask = taskResultSetEvent.execTask
- val rootExecTask = execTask.getPhysicalContext.getRootTask
- val runners = getRunningTask(rootExecTask)
- runners.find(_.task.getId.equals(execTask.getId)).foreach {
+ debug(s"received taskResultSetEvent ${taskResultSetEvent.execTask.getId}")
+ findDealEventTaskRunner(taskResultSetEvent).foreach {
case asyncExecTaskRunner: AsyncExecTaskRunner =>
asyncExecTaskRunner.addResultSet(taskResultSetEvent.resultSet)
case _ =>
@@ -61,11 +58,8 @@
}
override def onResultSizeCreated(taskResultSetSizeEvent: TaskResultSetSizeEvent): Unit = {
- info(s"received taskResultSetSizeEvent $taskResultSetSizeEvent")
- val execTask = taskResultSetSizeEvent.execTask
- val rootExecTask = execTask.getPhysicalContext.getRootTask
- val runners = getRunningTask(rootExecTask)
- runners.find(_.task.getId.equals(execTask.getId)).foreach {
+ debug(s"received taskResultSetSizeEvent $taskResultSetSizeEvent")
+ findDealEventTaskRunner(taskResultSetSizeEvent).foreach {
case asyncExecTaskRunner: AsyncExecTaskRunner =>
asyncExecTaskRunner.setResultSize(taskResultSetSizeEvent.resultSize)
case _ =>
@@ -73,25 +67,20 @@
}
override def onTaskErrorResponseEvent(taskErrorResponseEvent: TaskErrorResponseEvent): Unit = {
- info(s"received taskErrorResponseEvent $taskErrorResponseEvent")
- val execTask = taskErrorResponseEvent.execTask
- val rootExecTask = execTask.getPhysicalContext.getRootTask
- val runners = getRunningTask(rootExecTask)
- runners.find(_.task.getId.equals(execTask.getId)).foreach {
+
+ findDealEventTaskRunner(taskErrorResponseEvent).foreach {
case asyncExecTaskRunner: AsyncExecTaskRunner =>
+ info(s"received taskErrorResponseEvent $taskErrorResponseEvent")
asyncExecTaskRunner.markFailed(taskErrorResponseEvent.errorMsg, null)
case _ =>
}
}
override def onStatusUpdate(taskStatusEvent: TaskStatusEvent): Unit = {
- info(s"received taskStatusEvent $taskStatusEvent")
+ debug(s"received taskStatusEvent $taskStatusEvent")
if (ExecutionNodeStatus.isCompleted(taskStatusEvent.status)) {
- val execTask = taskStatusEvent.execTask
- val rootExecTask = execTask.getPhysicalContext.getRootTask
- val runners = getRunningTask(rootExecTask)
- runners.find(_.task.getId.equals(execTask.getId)).foreach { runner =>
- info(s"Task(${execTask.getIDInfo}) is completed, status ${taskStatusEvent.status}")
+ findDealEventTaskRunner(taskStatusEvent).foreach { runner =>
+ info(s"Task(${taskStatusEvent.execTask.getIDInfo()}) is completed, status ${taskStatusEvent.status}")
//To transient taskRunner status
runner.transientStatus(taskStatusEvent.status)
//addCompletedTask(runner)
@@ -99,6 +88,19 @@
}
}
+ private def findDealEventTaskRunner(event: TaskInfoEvent): Option[ExecTaskRunner] = {
+ val execTask = event.execTask
+ val rootExecTask = execTask.getPhysicalContext.getRootTask
+ val runners = getRunningTask(rootExecTask).filter{ taskRunner =>
+ taskRunner.task match {
+ case asyncExecTask: AsyncExecTask =>
+ asyncExecTask.canDealEvent(event)
+ case _ => false
+ }
+ }
+ runners.headOption
+ }
+
override protected def execTaskToTaskRunner(execTask: ExecTask): ExecTaskRunner = {
new AsyncExecTaskRunnerImpl(execTask)
diff --git a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/pom.xml b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/pom.xml
index 2b24b99..5245dfa 100644
--- a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/pom.xml
+++ b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis-orchestrator</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/ComputationEngineConnManager.scala b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/ComputationEngineConnManager.scala
index 014e7d0..dd587e1 100644
--- a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/ComputationEngineConnManager.scala
+++ b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/ComputationEngineConnManager.scala
@@ -16,17 +16,19 @@
package com.webank.wedatasphere.linkis.orchestrator.ecm
+import java.net.SocketTimeoutException
import java.util
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicInteger
import com.webank.wedatasphere.linkis.common.ServiceInstance
import com.webank.wedatasphere.linkis.common.exception.LinkisRetryException
-import com.webank.wedatasphere.linkis.common.utils.Logging
+import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
import com.webank.wedatasphere.linkis.governance.common.conf.GovernanceCommonConf
import com.webank.wedatasphere.linkis.manager.common.entity.node.EngineNode
import com.webank.wedatasphere.linkis.manager.common.protocol.engine.{EngineAskAsyncResponse, EngineAskRequest, EngineCreateError, EngineCreateSuccess}
import com.webank.wedatasphere.linkis.manager.label.constant.LabelKeyConstant
+import com.webank.wedatasphere.linkis.manager.label.entity.entrance.BindEngineLabel
import com.webank.wedatasphere.linkis.orchestrator.ecm.cache.EngineAsyncResponseCache
import com.webank.wedatasphere.linkis.orchestrator.ecm.conf.ECMPluginConf
import com.webank.wedatasphere.linkis.orchestrator.ecm.entity.{DefaultMark, Mark, MarkReq, Policy}
@@ -35,6 +37,7 @@
import com.webank.wedatasphere.linkis.orchestrator.ecm.service.impl.{ComputationConcurrentEngineConnExecutor, ComputationEngineConnExecutor}
import com.webank.wedatasphere.linkis.rpc.Sender
import com.webank.wedatasphere.linkis.rpc.exception.DWCRPCRetryException
+import org.apache.commons.lang.exception.ExceptionUtils
import scala.collection.JavaConversions._
import scala.concurrent.duration.Duration
@@ -54,10 +57,24 @@
override def applyMark(markReq: MarkReq): Mark = {
if (null == markReq) return null
- MARK_CACHE_LOCKER.synchronized {
+ val mark = MARK_CACHE_LOCKER.synchronized {
val markCache = getMarkCache().keys
- val maybeMark = markCache.find(_.getMarkReq.equals(markReq))
- maybeMark.orNull
+ val maybeMark = markCache.find(_.getMarkReq.equals(markReq))
+ maybeMark.orNull
+ }
+ if (null == mark) {
+ if (markReq.getLabels.containsKey(LabelKeyConstant.BIND_ENGINE_KEY)) {
+ val bindEngineLabel = MarkReq.getLabelBuilderFactory.createLabel[BindEngineLabel](LabelKeyConstant.BIND_ENGINE_KEY,
+ markReq.getLabels.get(LabelKeyConstant.BIND_ENGINE_KEY))
+ if (!bindEngineLabel.getIsJobGroupHead) {
+ val msg = s"Cannot find mark related to bindEngineLabel : ${bindEngineLabel.getStringValue}"
+ error(msg)
+ throw new ECMPluginErrorException(ECMPluginConf.ECM_MARK_CACHE_ERROR_CODE, msg)
+ }
+ }
+ createMark(markReq)
+ } else {
+ mark
}
}
@@ -72,7 +89,7 @@
"mark_" + idCreator.getAndIncrement()
}
- override protected def askEngineConnExecutor(engineAskRequest: EngineAskRequest): EngineConnExecutor = {
+ override protected def askEngineConnExecutor(engineAskRequest: EngineAskRequest, mark: Mark): EngineConnExecutor = {
engineAskRequest.setTimeOut(getEngineConnApplyTime)
var count = getEngineConnApplyAttempts()
var retryException: LinkisRetryException = null
@@ -80,7 +97,7 @@
count = count - 1
val start = System.currentTimeMillis()
try {
- val engineNode = getEngineNodeAskManager(engineAskRequest)
+ val engineNode = getEngineNodeAskManager(engineAskRequest, mark)
if (null != engineNode) {
val engineConnExecutor = if (null != engineAskRequest.getLabels &&
engineAskRequest.getLabels.containsKey(LabelKeyConstant.CONCURRENT_ENGINE_KEY)) {
@@ -96,11 +113,11 @@
} catch {
case t: LinkisRetryException =>
val taken = System.currentTimeMillis() - start
- error(s"Failed to askEngineAskRequest time taken ($taken), with DWCRPCRetryException", t)
+ warn(s"${mark.getMarkId()} Failed to askEngineAskRequest time taken ($taken), ${t.getMessage}")
retryException = t
case t: Throwable =>
val taken = System.currentTimeMillis() - start
- error(s"Failed to askEngineAskRequest time taken ($taken), ${t.getMessage}")
+ warn(s"${mark.getMarkId()} Failed to askEngineAskRequest time taken ($taken)")
throw t
}
}
@@ -108,31 +125,40 @@
throw retryException
}else{
throw new ECMPluginErrorException(ECMPluginConf.ECM_ERROR_CODE,
- s"Failed to ask engineAskRequest $engineAskRequest by retry ${getEngineConnApplyAttempts - count} ")
+ s"${mark.getMarkId()} Failed to ask engineAskRequest $engineAskRequest by retry ${getEngineConnApplyAttempts - count} ")
}
}
- private def getEngineNodeAskManager(engineAskRequest: EngineAskRequest): EngineNode = {
- getManagerSender().ask(engineAskRequest) match {
+ private def getEngineNodeAskManager(engineAskRequest: EngineAskRequest, mark: Mark): EngineNode = {
+ val response = Utils.tryCatch(getManagerSender().ask(engineAskRequest)) { t: Throwable =>
+ ExceptionUtils.getRootCause(t) match {
+ case socketTimeoutException: SocketTimeoutException =>
+ val msg = s"mark ${mark.getMarkId()} failed to ask linkis Manager Can be retried " + ExceptionUtils.getRootCauseMessage(t)
+ throw new LinkisRetryException(ECMPluginConf.ECM_ENGNE_CREATION_ERROR_CODE, msg)
+ case _ =>
+ throw t
+ }
+ }
+ response match {
case engineNode: EngineNode =>
- info(s"Succeed to get engineNode $engineNode")
+ debug(s"Succeed to get engineNode $engineNode mark ${mark.getMarkId()}")
engineNode
case EngineAskAsyncResponse(id, serviceInstance) =>
- info(s"received EngineAskAsyncResponse id: ${id} serviceInstance: $serviceInstance")
- cacheMap.getAndRemove(id, Duration(engineAskRequest.getTimeOut + 60000, TimeUnit.MILLISECONDS)) match {
+ info(s"${mark.getMarkId()} received EngineAskAsyncResponse id: ${id} serviceInstance: $serviceInstance ")
+ cacheMap.getAndRemove(id, Duration(engineAskRequest.getTimeOut + 100000, TimeUnit.MILLISECONDS)) match {
case EngineCreateSuccess(id, engineNode) =>
- info(s"id:$id success to async get EngineNode $engineNode")
+ info(s"${mark.getMarkId()} async id:$id success to async get EngineNode $engineNode")
engineNode
case EngineCreateError(id, exception, retry) =>
- error(s"id:$id Failed to async get EngineNode, $exception")
+ debug(s"${mark.getMarkId()} async id:$id Failed to async get EngineNode, $exception")
if(retry){
- throw new LinkisRetryException(ECMPluginConf.ECM_ENGNE_CREATION_ERROR_CODE, exception)
+ throw new LinkisRetryException(ECMPluginConf.ECM_ENGNE_CREATION_ERROR_CODE, id + " Failed to async get EngineNode" + exception)
}else{
- throw new ECMPluginErrorException(ECMPluginConf.ECM_ENGNE_CREATION_ERROR_CODE, exception)
+ throw new ECMPluginErrorException(ECMPluginConf.ECM_ENGNE_CREATION_ERROR_CODE, id + " Failed to async get EngineNode " + exception)
}
}
case _ =>
- info(s"Failed to ask engineAskRequest $engineAskRequest, response is not engineNode")
+ info(s"${mark.getMarkId()} Failed to ask engineAskRequest $engineAskRequest, response is not engineNode")
null
}
}
diff --git a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/EngineConnManager.scala b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/EngineConnManager.scala
index 7347548..3bbb8fe 100644
--- a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/EngineConnManager.scala
+++ b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/EngineConnManager.scala
@@ -128,10 +128,10 @@
case Some(engineConnExecutor) => return engineConnExecutor
case None =>
}
- val engineConnExecutor = askEngineConnExecutor(mark.getMarkReq.createEngineConnAskReq())
+ val engineConnExecutor = askEngineConnExecutor(mark.getMarkReq.createEngineConnAskReq(), mark)
engineConnExecutor.useEngineConn
saveToMarkCache(mark, engineConnExecutor)
- info(s"mark ${mark.getMarkId()} Finished to getAvailableEngineConnExecutor by create")
+ debug(s"mark ${mark.getMarkId()} Finished to getAvailableEngineConnExecutor by create")
engineConnExecutor
} else {
throw new ECMPluginErrorException(ECMPluginConf.ECM_ERROR_CODE, " mark cannot null")
@@ -208,11 +208,11 @@
}
}
- protected def askEngineConnExecutor(engineAskRequest: EngineAskRequest): EngineConnExecutor
+ protected def askEngineConnExecutor(engineAskRequest: EngineAskRequest, mark: Mark): EngineConnExecutor
override def releaseMark(mark: Mark): Unit = {
if (null != mark && getMarkCache().containsKey(mark)) {
- info(s"Start to release mark ${mark.getMarkId()}")
+ debug(s"Start to release mark ${mark.getMarkId()}")
val executors = getMarkCache().get(mark).map(getEngineConnExecutorCache().get(_))
Utils.tryAndError(executors.foreach { executor =>
getEngineConnExecutorCache().remove(executor.getServiceInstance)
diff --git a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/EngineConnManagerBuilder.scala b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/EngineConnManagerBuilder.scala
index f8b0c1e..e0c1b43 100644
--- a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/EngineConnManagerBuilder.scala
+++ b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/EngineConnManagerBuilder.scala
@@ -23,7 +23,10 @@
import com.webank.wedatasphere.linkis.orchestrator.ecm.entity.Policy
-
+/**
+ *
+ *
+ */
trait EngineConnManagerBuilder {
def setPolicy(policy: Policy): EngineConnManagerBuilder
diff --git a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/LoadBalanceLabelEngineConnManager.scala b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/LoadBalanceLabelEngineConnManager.scala
index b91d90f..cfc58c2 100644
--- a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/LoadBalanceLabelEngineConnManager.scala
+++ b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/LoadBalanceLabelEngineConnManager.scala
@@ -10,7 +10,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-
package com.webank.wedatasphere.linkis.orchestrator.ecm
import java.util
@@ -43,90 +42,93 @@
private val MARK_REQ_CACHE_LOCKER = new Object()
-
- override def applyMark(markReq: MarkReq): Mark = {
- if (null == markReq) return null
- val markReqCache = MARK_REQ_CACHE_LOCKER.synchronized {
- getMarkReqAndMarkCache().keys
- }
- var mayBeMarkReq = markReqCache.find(_.equals(markReq)).orNull
- val markNum: Int = {
- if (markReq.getLabels.containsKey(LabelKeyConstant.LOAD_BALANCE_KEY)) {
- val loadBalanceLabel = MarkReq.getLabelBuilderFactory.createLabel[LoadBalanceLabel](LabelKeyConstant.LOAD_BALANCE_KEY,
- markReq.getLabels.get(LabelKeyConstant.LOAD_BALANCE_KEY))
- if (loadBalanceLabel.getCapacity > 0) {
- loadBalanceLabel.getCapacity
- } else {
- ECMPluginConf.DEFAULT_LOADBALANCE_CAPACITY.getValue
- }
+ private def getMarkNumByMarReq(markReq: MarkReq): Int = {
+ if (markReq.getLabels.containsKey(LabelKeyConstant.LOAD_BALANCE_KEY)) {
+ val loadBalanceLabel = MarkReq.getLabelBuilderFactory.createLabel[LoadBalanceLabel](LabelKeyConstant.LOAD_BALANCE_KEY,
+ markReq.getLabels.get(LabelKeyConstant.LOAD_BALANCE_KEY))
+ if (loadBalanceLabel.getCapacity > 0) {
+ loadBalanceLabel.getCapacity
} else {
- error(s"There must be LoadBalanceLabel in markReq : ${BDPJettyServerHelper.gson.toJson(markReq)}")
ECMPluginConf.DEFAULT_LOADBALANCE_CAPACITY.getValue
}
+ } else {
+ error(s"There must be LoadBalanceLabel in markReq : ${BDPJettyServerHelper.gson.toJson(markReq)}")
+ ECMPluginConf.DEFAULT_LOADBALANCE_CAPACITY.getValue
}
- var count = 0
- if (null != mayBeMarkReq && !getMarkReqAndMarkCache().get(mayBeMarkReq).isEmpty) {
- count = getMarkReqAndMarkCache().get(mayBeMarkReq).size()
- }
+ }
+ /**
+ * Apply for a Mark.
+ * 1. If no matching Mark exists, create a new one.
+ * 2. One MarkRequest maps to multiple Marks; one Mark maps to one Engine.
+ * 3. If a bindEngineLabel exists, pick one Mark randomly at jobStart and cache it for the following jobGroup; remove the cache at jobEnd.
+ * 4. Return the chosen Mark.
+ *
+ * @param markReq
+ * @return
+ */
+ override def applyMark(markReq: MarkReq): Mark = {
+ if (null == markReq) return null
+
+ val markNum: Int = getMarkNumByMarReq(markReq)
+ /*val markReqCache = MARK_REQ_CACHE_LOCKER.synchronized {
+ getMarkReqAndMarkCache().keys
+ }
+ var mayBeMarkReq = markReqCache.find(_.equals(markReq)).orNull*/
+ var count = 0
+ if (getMarkReqAndMarkCache().containsKey(markReq)) {
+ count = getMarkReqAndMarkCache().get(markReq).size()
+ }
// toto check if count >= markNum, will not add more mark
while (count < markNum) {
- count += 1
createMark(markReq)
- }
- if (null == mayBeMarkReq) {
- val markReqCache2 = MARK_REQ_CACHE_LOCKER.synchronized {
- getMarkReqAndMarkCache().keys
- }
- mayBeMarkReq = markReqCache2.find(_.equals(markReq)).get
+ count = getMarkReqAndMarkCache().get(markReq).size()
}
// markReq is in cache, and mark list is ready
- val markList = getMarkReqAndMarkCache().get(mayBeMarkReq)
- var choosedMark: Mark = null
- if (mayBeMarkReq.getLabels.containsKey(LabelKeyConstant.BIND_ENGINE_KEY)) {
+ val markList = getMarkReqAndMarkCache().get(markReq)
+ var chooseMark: Mark = null
+ if (markReq.getLabels.containsKey(LabelKeyConstant.BIND_ENGINE_KEY)) {
val bindEngineLabel = MarkReq.getLabelBuilderFactory.createLabel[BindEngineLabel](LabelKeyConstant.BIND_ENGINE_KEY,
- mayBeMarkReq.getLabels.get(LabelKeyConstant.BIND_ENGINE_KEY))
+ markReq.getLabels.get(LabelKeyConstant.BIND_ENGINE_KEY))
if (bindEngineLabel.getIsJobGroupHead) {
// getRandom mark
- choosedMark = markList.get(new util.Random().nextInt(markList.length))
- choosedMark.asInstanceOf[LoadBalanceMark].setTaskMarkReq(markReq)
- getIdToMarkCache().put(bindEngineLabel.getJobGroupId, choosedMark)
- } else {
- // get mark from cache
- if (getIdToMarkCache().containsKey(bindEngineLabel.getJobGroupId)) {
- choosedMark = getIdToMarkCache().get(bindEngineLabel.getJobGroupId)
- choosedMark.asInstanceOf[LoadBalanceMark].setTaskMarkReq(markReq)
- val insList = getMarkCache().get(choosedMark)
- if (null == insList || insList.size() != 1) {
- val msg = s"Engine instance releated to choosedMark : ${BDPJettyServerHelper.gson.toJson(choosedMark)} with bindEngineLabel : ${bindEngineLabel.getStringValue} cannot be null"
- error(msg)
- throw new ECMPluginErrorException(ECMPluginConf.ECM_CACHE_ERROR_CODE, msg)
- }
- } else {
- val msg = s"Cannot find mark related to bindEngineLabel : ${bindEngineLabel.getStringValue}"
+ chooseMark = markList.get(new util.Random().nextInt(markList.length))
+ //chooseMark.asInstanceOf[LoadBalanceMark].setTaskMarkReq(markReq)
+ getIdToMarkCache().put(bindEngineLabel.getJobGroupId, chooseMark)
+ } else if (getIdToMarkCache().containsKey(bindEngineLabel.getJobGroupId)) {
+ chooseMark = getIdToMarkCache().get(bindEngineLabel.getJobGroupId)
+ //chooseMark.asInstanceOf[LoadBalanceMark].setTaskMarkReq(markReq)
+ val insList = getMarkCache().get(chooseMark)
+ if (null == insList || insList.size() != 1) {
+ val msg = s"Engine instance releated to chooseMark : ${BDPJettyServerHelper.gson.toJson(chooseMark)} with bindEngineLabel : ${bindEngineLabel.getStringValue} cannot be null"
error(msg)
- throw new ECMPluginErrorException(ECMPluginConf.ECM_CACHE_ERROR_CODE, msg)
+ throw new ECMPluginErrorException(ECMPluginConf.ECM_MARK_CACHE_ERROR_CODE, msg)
}
+ } else {
+ val msg = s"Cannot find mark${chooseMark.getMarkId()} related to bindEngineLabel : ${bindEngineLabel.getStringValue}"
+ error(msg)
+ throw new ECMPluginErrorException(ECMPluginConf.ECM_MARK_CACHE_ERROR_CODE, msg)
}
if (bindEngineLabel.getIsJobGroupEnd) {
if (getIdToMarkCache().containsKey(bindEngineLabel.getJobGroupId)) {
+ info(s"Start to remove mark${chooseMark.getMarkId()} Cache ${bindEngineLabel.getStringValue}")
getIdToMarkCache().remove(bindEngineLabel.getJobGroupId)
} else {
- error(s"Cannot find mark related to bindEngineLabel : ${bindEngineLabel.getStringValue}, cannot remove it.")
+ error(s"Cannot find mark${chooseMark.getMarkId()} related to bindEngineLabel : ${bindEngineLabel.getStringValue}, cannot remove it.")
}
}
} else {
// treat as isHead and isEnd
- choosedMark = markList.get(new Random().nextInt(count))
+ chooseMark = markList.get(new Random().nextInt(count))
}
- choosedMark
+ chooseMark
}
/**
- * 1. Create a new Mark
- * 2. There will be a process of requesting the engine when generating a new Mark. If the request is received, it will be stored in the Map: Mark is Key, EngineConnExecutor is Value
- * 3. The number of Marks generated is equal to the concurrent amount of LoadBalance
- */
+ * 1. Create a new Mark
+ * 2. There will be a process of requesting the engine when generating a new Mark. If the request is received, it will be stored in the Map: Mark is Key, EngineConnExecutor is Value
+ * 3. The number of Marks generated is equal to the concurrent amount of LoadBalance
+ */
override def createMark(markReq: MarkReq): Mark = {
val mark = new LoadBalanceMark(nextMarkId(), markReq)
addMark(mark, new util.ArrayList[ServiceInstance]())
@@ -135,14 +137,10 @@
}
private def addMarkReqAndMark(req: MarkReq, mark: DefaultMark): Unit = {
- if (null != req) {
- val markReqCache = MARK_REQ_CACHE_LOCKER.synchronized {
- getMarkReqAndMarkCache().keys
- }
- val maybeMarkReq = markReqCache.find(_.eq(req)).getOrElse(null)
- if (null != maybeMarkReq) {
- val markList = markReqAndMarkCache.get(maybeMarkReq)
- val mayBeMark = markList.find(_.getMarkId().equals(mark.getMarkId())).getOrElse(null)
+ if (null != req) MARK_REQ_CACHE_LOCKER.synchronized {
+ val markList = getMarkReqAndMarkCache().get(req)
+ if (null != markList) {
+ val mayBeMark = markList.find(_.getMarkId().equals(mark.getMarkId())).orNull
if (null == mayBeMark) {
markList.add(mark)
} else {
@@ -157,39 +155,32 @@
}
override def getAvailableEngineConnExecutor(mark: Mark): EngineConnExecutor = {
- info(s"mark ${mark.getMarkId()} start to tryReuseEngineConnExecutor")
+
if (null != mark && getMarkCache().containsKey(mark)) {
tryReuseEngineConnExecutor(mark) match {
- case Some(engineConnExecutor) => return engineConnExecutor
+ case Some(engineConnExecutor) =>
+ info(s"mark ${mark.getMarkId()} ReuseEngineConnExecutor $engineConnExecutor")
+ return engineConnExecutor
case None =>
- }
- } else {
- throw new ECMPluginErrorException(ECMPluginConf.ECM_ERROR_CODE, s"mark cannot be null")
}
- info(s"Start to askEngineConnExecutor for marks in MarkCache")
- getMarkCache().keySet().foreach{ cachedMark =>
- val serviceInstances = getMarkCache().get(cachedMark)
- if(CollectionUtils.isEmpty(serviceInstances)){
- info(s"mark ${cachedMark.getMarkId()} start to askEngineConnExecutor")
- val engineConnAskReq = cachedMark.getMarkReq.createEngineConnAskReq()
+ info(s"mark ${mark.getMarkId()} start to askEngineConnExecutor")
+ val engineConnAskReq = mark.getMarkReq.createEngineConnAskReq()
+ val existInstances = getAllInstances()
+ if (null != existInstances && existInstances.nonEmpty) {
val reuseExclusionLabel = MarkReq.getLabelBuilderFactory.createLabel(classOf[ReuseExclusionLabel])
reuseExclusionLabel.setInstances(getAllInstances())
engineConnAskReq.getLabels.put(LabelKeyConstant.REUSE_EXCLUSION_KEY, reuseExclusionLabel.getValue)
- val engineConnExecutor = askEngineConnExecutor(engineConnAskReq)
- saveToMarkCache(cachedMark, engineConnExecutor)
- info(s"mark ${cachedMark.getMarkId()} Finished to getAvailableEngineConnExecutor by create")
}
- }
- tryReuseEngineConnExecutor(mark) match {
- case Some(engineConnExecutor) =>
- engineConnExecutor.useEngineConn
- return engineConnExecutor
- case None =>
- throw new ECMPluginErrorException(ECMPluginConf.ECM_ERROR_CODE, s"mark ${mark.getMarkId()} failed to start engineConn")
+ val engineConnExecutor = askEngineConnExecutor(engineConnAskReq, mark)
+ saveToMarkCache(mark, engineConnExecutor)
+ debug(s"mark ${mark.getMarkId()} Finished to getAvailableEngineConnExecutor by create")
+ engineConnExecutor
+ } else {
+ throw new ECMPluginErrorException(ECMPluginConf.ECM_ERROR_CODE, s"mark cannot be null")
}
}
- override protected def tryReuseEngineConnExecutor(mark: Mark):Option[EngineConnExecutor] = {
+ /*override protected def tryReuseEngineConnExecutor(mark: Mark): Option[EngineConnExecutor] = {
val instances = getInstances(mark)
if (null == instances || instances.isEmpty) {
return None
@@ -228,8 +219,34 @@
return None
}
}
- }
+ }*/
+ override def releaseEngineConnExecutor(engineConnExecutor: EngineConnExecutor, mark: Mark): Unit = {
+ if (null != engineConnExecutor && null != mark && getMarkCache().containsKey(mark)) {
+ info(s"Start to release EngineConnExecutor mark id ${mark.getMarkId()} engineConnExecutor ${engineConnExecutor.getServiceInstance}")
+ getEngineConnExecutorCache().remove(engineConnExecutor.getServiceInstance)
+ engineConnExecutor.close()
+ val instances = getInstances(mark)
+ if (null != instances) {
+ instances.remove(engineConnExecutor.getServiceInstance)
+ if (instances.isEmpty) releaseMark(mark)
+ }
+ }
+ if (!getMarkCache().containsKey(mark)) MARK_REQ_CACHE_LOCKER.synchronized {
+ val marks = getMarkReqAndMarkCache().get(mark.getMarkReq)
+ if (null == marks || marks.isEmpty) {
+ getMarkReqAndMarkCache().remove(mark.getMarkReq)
+ } else {
+ val newMarks = marks.filter(!_.getMarkId().equals(mark.getMarkId()))
+ if (null == newMarks || newMarks.isEmpty) {
+ getMarkReqAndMarkCache().remove(mark.getMarkReq)
+ } else {
+ getMarkReqAndMarkCache().put(mark.getMarkReq, newMarks)
+ }
+ //getMarkReqAndMarkCache().put(mark.getMarkReq))
+ }
+ }
+ }
protected def getAllInstances(): Array[String] = MARK_CACHE_LOCKER.synchronized {
val instances = new ArrayBuffer[String]
diff --git a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/cache/EngineAsyncResponseCache.scala b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/cache/EngineAsyncResponseCache.scala
index d82e5cb..1597699 100644
--- a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/cache/EngineAsyncResponseCache.scala
+++ b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/cache/EngineAsyncResponseCache.scala
@@ -16,15 +16,24 @@
package com.webank.wedatasphere.linkis.orchestrator.ecm.cache
-import com.webank.wedatasphere.linkis.common.utils.Utils
-import com.webank.wedatasphere.linkis.manager.common.protocol.engine.{EngineAsyncResponse, EngineCreateError}
+import java.util.concurrent.{TimeUnit, TimeoutException}
+
+import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
+import com.webank.wedatasphere.linkis.governance.common.conf.GovernanceCommonConf
+import com.webank.wedatasphere.linkis.manager.common.protocol.RequestManagerUnlock
+import com.webank.wedatasphere.linkis.manager.common.protocol.engine.{EngineAsyncResponse, EngineCreateError, EngineCreateSuccess}
import com.webank.wedatasphere.linkis.orchestrator.ecm.conf.ECMPluginConf
import com.webank.wedatasphere.linkis.orchestrator.ecm.exception.ECMPluginCacheException
+import com.webank.wedatasphere.linkis.rpc.Sender
import org.apache.commons.lang.exception.ExceptionUtils
+import scala.collection.mutable.ArrayBuffer
import scala.concurrent.duration.Duration
-
+/**
+ *
+ *
+ */
trait EngineAsyncResponseCache {
@throws[ECMPluginCacheException]
@@ -45,23 +54,41 @@
}
-class EngineAsyncResponseCacheMap extends EngineAsyncResponseCache {
+case class EngineAsyncResponseEntity(engineAsyncResponse: EngineAsyncResponse, createTime: Long)
- private val cacheMap: java.util.Map[String, EngineAsyncResponse] = new java.util.concurrent.ConcurrentHashMap[String, EngineAsyncResponse]()
+class EngineAsyncResponseCacheMap extends EngineAsyncResponseCache with Logging {
+
+ private val cacheMap: java.util.Map[String, EngineAsyncResponseEntity] = new java.util.concurrent.ConcurrentHashMap[String, EngineAsyncResponseEntity]()
+
+ private val expireTime = ECMPluginConf.EC_ASYNC_RESPONSE_CLEAR_TIME.getValue.toLong
+
+ init()
override def get(id: String, timeout: Duration): EngineAsyncResponse = {
Utils.waitUntil(() => cacheMap.containsKey(id), timeout)
- cacheMap.get(id)
+ val engineAsyncResponseEntity = cacheMap.get(id)
+ if (null != engineAsyncResponseEntity) {
+ engineAsyncResponseEntity.engineAsyncResponse
+ } else {
+ EngineCreateError(id, "async info null", retry = true)
+ }
}
override def getAndRemove(id: String, timeout: Duration): EngineAsyncResponse = {
- try {
+ Utils.tryCatch {
Utils.waitUntil(() => cacheMap.containsKey(id), timeout)
- } catch {
+ } {
+ case t: TimeoutException =>
+ put(id, EngineCreateError(id, s"Asynchronous request engine timeout(请求引擎超时,可能是因为资源不足,您可以选择重试),async id $id", retry = true))
case t: Throwable =>
put(id, EngineCreateError(id, ExceptionUtils.getRootCauseStackTrace(t).mkString("\n")))
}
- cacheMap.remove(id)
+ val engineAsyncResponseEntity = cacheMap.remove(id)
+ if (null != engineAsyncResponseEntity) {
+ engineAsyncResponseEntity.engineAsyncResponse
+ } else {
+ EngineCreateError(id, "async info null", retry = true)
+ }
}
@throws[ECMPluginCacheException]
@@ -70,7 +97,40 @@
cacheMap.remove(id)
throw new ECMPluginCacheException(ECMPluginConf.ECM_CACHE_ERROR_CODE, "id duplicate")
}
- cacheMap.put(id, engineAsyncResponse)
+ cacheMap.put(id, EngineAsyncResponseEntity(engineAsyncResponse, System.currentTimeMillis()))
}
+ def init(): Unit = {
+ info(s"Start cache map clear defaultScheduler")
+ Utils.defaultScheduler.scheduleAtFixedRate(new Runnable {
+ override def run(): Unit = try {
+
+ val iterator = cacheMap.entrySet().iterator()
+ val expireBuffer = new ArrayBuffer[String]()
+ while (iterator.hasNext) {
+ val keyValue = iterator.next()
+ val curTime = System.currentTimeMillis() - expireTime
+ if (null != keyValue.getValue && keyValue.getValue.createTime < curTime) {
+ expireBuffer += keyValue.getKey
+ }
+ }
+ expireBuffer.foreach { key =>
+ info(s" to clear engineAsyncResponseEntity key $key")
+ val engineAsyncResponseEntity = cacheMap.remove(key)
+ if (null != engineAsyncResponseEntity && engineAsyncResponseEntity.engineAsyncResponse.isInstanceOf[EngineCreateSuccess]) {
+ val engineCreateSuccess = engineAsyncResponseEntity.engineAsyncResponse.asInstanceOf[EngineCreateSuccess]
+ info(s"clear engineCreateSuccess, to unlock $engineCreateSuccess")
+ val requestManagerUnlock = RequestManagerUnlock(engineCreateSuccess.engineNode.getServiceInstance, engineCreateSuccess.engineNode.getLock, Sender.getThisServiceInstance)
+ getManagerSender.send(requestManagerUnlock)
+ }
+ }
+ } catch {
+ case throwable: Throwable =>
+ error("Failed to clear EngineAsyncResponseCacheMap", throwable)
+ }
+ }, 60000, expireTime, TimeUnit.MILLISECONDS)
+ }
+
+ private def getManagerSender: Sender = Sender.getSender(GovernanceCommonConf.MANAGER_SPRING_NAME.getValue)
+
}
\ No newline at end of file
diff --git a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/conf/ECMPluginConf.scala b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/conf/ECMPluginConf.scala
index 3064ff8..7127059 100644
--- a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/conf/ECMPluginConf.scala
+++ b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/conf/ECMPluginConf.scala
@@ -38,7 +38,11 @@
val ECM_ENGINE_CACHE_ERROR = 12004
+ val ECM_MARK_CACHE_ERROR_CODE = 12005
+
val DEFAULT_LOADBALANCE_CAPACITY = CommonVars("wds.linkis.orchestrator.ecm.loadbalance.capacity.default", 3)
+ val EC_ASYNC_RESPONSE_CLEAR_TIME = CommonVars("wds.linkis.ecp.ec.response.time", new TimeType("3m"))
+
}
diff --git a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/entity/Mark.scala b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/entity/Mark.scala
index dd88665..6d747cd 100644
--- a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/entity/Mark.scala
+++ b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/entity/Mark.scala
@@ -16,7 +16,10 @@
package com.webank.wedatasphere.linkis.orchestrator.ecm.entity
-
+/**
+ *
+ *
+ */
trait Mark {
def getMarkId(): String
diff --git a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/entity/MarkReq.scala b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/entity/MarkReq.scala
index 6392076..858297e 100644
--- a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/entity/MarkReq.scala
+++ b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/entity/MarkReq.scala
@@ -30,7 +30,10 @@
import scala.collection.JavaConversions._
-
+/**
+ *
+ *
+ */
trait MarkReq {
diff --git a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/service/EngineConnExecutor.scala b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/service/EngineConnExecutor.scala
index b7ad2a0..e122770 100644
--- a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/service/EngineConnExecutor.scala
+++ b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/service/EngineConnExecutor.scala
@@ -78,6 +78,9 @@
state.map(_.hashCode()).foldLeft(0)((a, b) => 31 * a + b)
}
+ override def toString: String = {
+ "" + getServiceInstance
+ }
}
diff --git a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/service/impl/ComputationEngineConnExecutor.scala b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/service/impl/ComputationEngineConnExecutor.scala
index 37b9fe4..eb07f14 100644
--- a/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/service/impl/ComputationEngineConnExecutor.scala
+++ b/linkis-orchestrator/plugin/linkis-orchestrator-ecm-plugin/src/main/scala/com/webank/wedatasphere/linkis/orchestrator/ecm/service/impl/ComputationEngineConnExecutor.scala
@@ -43,11 +43,11 @@
private def getEngineConnSender: Sender = Sender.getSender(getServiceInstance)
override def close(): Unit = {
- info(s"Start to release engineConn $getServiceInstance")
+ debug(s"Start to release engineConn $getServiceInstance")
val requestManagerUnlock = RequestManagerUnlock(getServiceInstance, locker, Sender.getThisServiceInstance)
killAll()
getManagerSender.send(requestManagerUnlock)
- info(s"Finished to release engineConn $getServiceInstance")
+ debug(s"Finished to release engineConn $getServiceInstance")
}
override def useEngineConn: Boolean = {
@@ -66,35 +66,35 @@
}
override def execute(requestTask: RequestTask): ExecuteResponse = {
- info(s"Start to submit task to engineConn($getServiceInstance)")
+ debug(s"Start to submit task${requestTask.getSourceID()} to engineConn($getServiceInstance)")
requestTask.setLock(this.locker)
getEngineConnSender.ask(requestTask) match {
case submitResponse: SubmitResponse =>
- info(s"Succeed to submit task to engineConn($getServiceInstance), Get asyncResponse execID is ${submitResponse}")
+ info(s"Succeed to submit task${requestTask.getSourceID()} to engineConn($getServiceInstance), Get asyncResponse execID is ${submitResponse}")
getRunningTasks.put(submitResponse.taskId, requestTask)
submitResponse
case outPutResponse: OutputExecuteResponse =>
- info(s" engineConn($getServiceInstance) Succeed to execute task, and get Res")
+ info(s" engineConn($getServiceInstance) Succeed to execute task${requestTask.getSourceID()}, and get Res")
outPutResponse
case errorExecuteResponse: ErrorExecuteResponse =>
- error(s"engineConn($getServiceInstance) Failed to execute task ,error msg ${errorExecuteResponse.message}", errorExecuteResponse.t)
+ error(s"engineConn($getServiceInstance) Failed to execute task${requestTask.getSourceID()} ,error msg ${errorExecuteResponse.message}", errorExecuteResponse.t)
errorExecuteResponse
case successExecuteResponse: SuccessExecuteResponse =>
- info(s" engineConn($getServiceInstance) Succeed to execute task, no res")
+ info(s" engineConn($getServiceInstance) Succeed to execute task${requestTask.getSourceID()}, no res")
successExecuteResponse
case _ =>
- throw new ECMPluginErrorException(ECMPluginConf.ECM_ERROR_CODE, s"engineConn($getServiceInstance) Failed to execute task, get response error")
+ throw new ECMPluginErrorException(ECMPluginConf.ECM_ERROR_CODE, s"engineConn($getServiceInstance) Failed to execute task${requestTask.getSourceID()}, get response error")
}
}
override def killTask(execId: String): Boolean = {
Utils.tryCatch {
- info(s"begin to send RequestTaskKill to engine, execID: $execId")
+ info(s"begin to send RequestTaskKill to engineConn($getServiceInstance), execID: $execId")
getEngineConnSender.send(RequestTaskKill(execId))
- info(s"Finished to send RequestTaskKill to engine, execID: $execId")
+ info(s"Finished to send RequestTaskKill to engineConn($getServiceInstance), execID: $execId")
true
} { t: Throwable =>
- error(s"Failed to kill task $execId", t)
+ error(s"Failed to kill task $execId engineConn($getServiceInstance)", t)
false
}
}
@@ -136,7 +136,7 @@
case ResponseTaskStatus(execId, status) =>
status
case _ =>
- throw new ECMPluginErrorException(ECMPluginConf.ECM_ERROR_CODE, s"Failed to get engineConn status ")
+ throw new ECMPluginErrorException(ECMPluginConf.ECM_ERROR_CODE, s"Failed to get engineConn($getServiceInstance) status ")
}
}
diff --git a/linkis-orchestrator/pom.xml b/linkis-orchestrator/pom.xml
index f69f8fe..a58a767 100644
--- a/linkis-orchestrator/pom.xml
+++ b/linkis-orchestrator/pom.xml
@@ -5,7 +5,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-client/pom.xml b/linkis-public-enhancements/linkis-bml/linkis-bml-client/pom.xml
index 4516d92..f2e1b09 100644
--- a/linkis-public-enhancements/linkis-bml/linkis-bml-client/pom.xml
+++ b/linkis-public-enhancements/linkis-bml/linkis-bml-client/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/java/com/webank/wedatasphere/linkis/bml/client/AbstractBmlClient.java b/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/java/com/webank/wedatasphere/linkis/bml/client/AbstractBmlClient.java
index 9a8e911..6b5d191 100644
--- a/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/java/com/webank/wedatasphere/linkis/bml/client/AbstractBmlClient.java
+++ b/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/java/com/webank/wedatasphere/linkis/bml/client/AbstractBmlClient.java
@@ -15,14 +15,8 @@
*/
package com.webank.wedatasphere.linkis.bml.client;
-import java.util.Map;
-
-
public abstract class AbstractBmlClient implements BmlClient {
protected String user;
- protected java.util.Map<String, Object> properties;
-
- public abstract void init();
public String getUser() {
return user;
@@ -31,12 +25,4 @@
public void setUser(String user) {
this.user = user;
}
-
- public Map<String, Object> getProperties() {
- return properties;
- }
-
- public void setProperties(Map<String, Object> properties) {
- this.properties = properties;
- }
}
diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/java/com/webank/wedatasphere/linkis/bml/client/BmlClientFactory.java b/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/java/com/webank/wedatasphere/linkis/bml/client/BmlClientFactory.java
index e4db90c..88a7234 100644
--- a/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/java/com/webank/wedatasphere/linkis/bml/client/BmlClientFactory.java
+++ b/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/java/com/webank/wedatasphere/linkis/bml/client/BmlClientFactory.java
@@ -16,33 +16,26 @@
package com.webank.wedatasphere.linkis.bml.client;
import com.webank.wedatasphere.linkis.bml.client.impl.HttpBmlClient;
-import com.webank.wedatasphere.linkis.bml.http.HttpConf;
+import com.webank.wedatasphere.linkis.httpclient.dws.config.DWSClientConfig;
import java.util.Map;
public class BmlClientFactory {
+
public static BmlClient createBmlClient(){
- return createBmlClient(null, null, null);
+ return new HttpBmlClient();
}
-
- public static BmlClient createBmlClient(String user){
- return createBmlClient(user, null, null);
+ public static BmlClient createBmlClient(String serverUrl){
+ return new HttpBmlClient(serverUrl);
}
- public static BmlClient createBmlClient(String user, Map<String, Object> properties){
- return createBmlClient(user, properties, null);
- }
- public static BmlClient createBmlClient(String user, Map<String, Object> properties, String serverUrl){
- if (serverUrl == null || "".equals(serverUrl.trim())) {
- serverUrl = HttpConf.gatewayInstance();
- }
- AbstractBmlClient bmlClient = new HttpBmlClient(serverUrl);
- bmlClient.setUser(user);
- bmlClient.setProperties(properties);
- bmlClient.init();
- return bmlClient;
+ public static BmlClient createBmlClient(DWSClientConfig clientConfig){
+ return new HttpBmlClient(clientConfig);
}
+ public static BmlClient createBmlClient(String serverUrl, Map<String, Object> properties){
+ return new HttpBmlClient(serverUrl, properties);
+ }
}
diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/scala/com/webank/wedatasphere/linkis/bml/client/impl/HttpBmlClient.scala b/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/scala/com/webank/wedatasphere/linkis/bml/client/impl/HttpBmlClient.scala
index 675ca8d..cb7a1b5 100644
--- a/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/scala/com/webank/wedatasphere/linkis/bml/client/impl/HttpBmlClient.scala
+++ b/linkis-public-enhancements/linkis-bml/linkis-bml-client/src/main/scala/com/webank/wedatasphere/linkis/bml/client/impl/HttpBmlClient.scala
@@ -21,63 +21,75 @@
import com.webank.wedatasphere.linkis.bml.client.AbstractBmlClient
import com.webank.wedatasphere.linkis.bml.common._
import com.webank.wedatasphere.linkis.bml.conf.BmlConfiguration._
+import com.webank.wedatasphere.linkis.bml.http.HttpConf
import com.webank.wedatasphere.linkis.bml.protocol._
import com.webank.wedatasphere.linkis.bml.request._
import com.webank.wedatasphere.linkis.bml.response.{BmlCreateBmlProjectResult, _}
import com.webank.wedatasphere.linkis.common.conf.Configuration
import com.webank.wedatasphere.linkis.common.io.FsPath
+import com.webank.wedatasphere.linkis.common.utils.Logging
import com.webank.wedatasphere.linkis.httpclient.authentication.AuthenticationStrategy
-import com.webank.wedatasphere.linkis.httpclient.config.{ClientConfig, ClientConfigBuilder}
+import com.webank.wedatasphere.linkis.httpclient.config.ClientConfigBuilder
import com.webank.wedatasphere.linkis.httpclient.dws.DWSHttpClient
import com.webank.wedatasphere.linkis.httpclient.dws.authentication.TokenAuthenticationStrategy
import com.webank.wedatasphere.linkis.httpclient.dws.config.DWSClientConfig
import com.webank.wedatasphere.linkis.storage.FSFactory
import org.apache.commons.io.IOUtils
import org.apache.commons.lang.StringUtils
-import org.slf4j.{Logger, LoggerFactory}
-class HttpBmlClient(serverUrl: String) extends AbstractBmlClient {
+class HttpBmlClient(clientConfig: DWSClientConfig,
+ serverUrl: String,
+ properties: util.Map[String, Any]) extends AbstractBmlClient with Logging {
- private val logger:Logger = LoggerFactory.getLogger(classOf[HttpBmlClient])
+ def this(serverUrl: String) = this(null, null, null)
- val DEFAULT_CLIENT_NAME:String = "BML-Client"
- var dwsClientConfig:DWSClientConfig = null
- var dwsClient:DWSHttpClient = null
- val FIRST_VERSION:String = "v000001"
+ def this() = this(null, null, null)
- override def init(): Unit = {
- val config = if (this.properties == null) {
- new util.HashMap[String, Any]()
- } else {
- this.properties
- }
+ def this(clientConfig: DWSClientConfig) = this(clientConfig, null, null)
- val maxConnection:Int = config.getOrDefault(CONNECTION_MAX_SIZE_SHORT_NAME, CONNECTION_MAX_SIZE.getValue).asInstanceOf[Int]
- val connectTimeout:Int = config.getOrDefault(CONNECTION_TIMEOUT_SHORT_NAME, CONNECTION_TIMEOUT.getValue).asInstanceOf[Int]
- val readTimeout:Int = config.getOrDefault(CONNECTION_READ_TIMEOUT_SHORT_NAME, CONNECTION_READ_TIMEOUT.getValue).asInstanceOf[Int]
- val tokenKey = config.getOrDefault(AUTH_TOKEN_KEY_SHORT_NAME, AUTH_TOKEN_KEY.getValue).asInstanceOf[String]
- val tokenValue = config.getOrDefault(AUTH_TOKEN_VALUE_SHORT_NAME, AUTH_TOKEN_VALUE.getValue).asInstanceOf[String]
- val clientName = config.getOrDefault(CLIENT_NAME_SHORT_NAME, DEFAULT_CLIENT_NAME).asInstanceOf[String]
-
- val authenticationStrategy:AuthenticationStrategy = new TokenAuthenticationStrategy()
- val clientConfig:ClientConfig = ClientConfigBuilder.newBuilder()
- .addServerUrl(serverUrl)
- .connectionTimeout(connectTimeout)
- .discoveryEnabled(false)
- .loadbalancerEnabled(false)
- .maxConnectionSize(maxConnection)
- .retryEnabled(false)
- .readTimeout(readTimeout)
- .setAuthenticationStrategy(authenticationStrategy)
- .setAuthTokenKey(tokenKey)
- .setAuthTokenValue(tokenValue)
- .build()
-
- dwsClientConfig = new DWSClientConfig(clientConfig)
- dwsClientConfig.setDWSVersion(Configuration.LINKIS_WEB_VERSION.getValue)
- dwsClient = new DWSHttpClient(dwsClientConfig, clientName)
+ def this(serverUrl: String, properties: util.Map[String, Any]) = {
+ this(null, serverUrl, properties)
}
+ private val clientName = if(properties == null) "BML-Client"
+ else properties.getOrDefault(CLIENT_NAME_SHORT_NAME, "BML-Client").asInstanceOf[String]
+ private val dwsClient = new DWSHttpClient(if(clientConfig != null) clientConfig else createClientConfig(), clientName)
+ val FIRST_VERSION = "v000001"
+
+ private def createClientConfig(): DWSClientConfig = {
+ val _serverUrl = if(StringUtils.isEmpty(serverUrl)) HttpConf.gatewayInstance else serverUrl
+ if(StringUtils.isEmpty(_serverUrl)) throw BmlClientFailException("serverUrl cannot be null.")
+ val config = if (properties == null) {
+ new util.HashMap[String, Object]()
+ } else {
+ properties
+ }
+
+ val maxConnection:Int = config.getOrDefault(CONNECTION_MAX_SIZE_SHORT_NAME, CONNECTION_MAX_SIZE.getValue).asInstanceOf[Int]
+ val connectTimeout:Int = config.getOrDefault(CONNECTION_TIMEOUT_SHORT_NAME, CONNECTION_TIMEOUT.getValue).asInstanceOf[Int]
+ val readTimeout:Int = config.getOrDefault(CONNECTION_READ_TIMEOUT_SHORT_NAME, CONNECTION_READ_TIMEOUT.getValue).asInstanceOf[Int]
+ val tokenKey = config.getOrDefault(AUTH_TOKEN_KEY_SHORT_NAME, AUTH_TOKEN_KEY.getValue).asInstanceOf[String]
+ val tokenValue = config.getOrDefault(AUTH_TOKEN_VALUE_SHORT_NAME, AUTH_TOKEN_VALUE.getValue).asInstanceOf[String]
+
+ val authenticationStrategy:AuthenticationStrategy = new TokenAuthenticationStrategy()
+ val clientConfig = ClientConfigBuilder.newBuilder()
+ .addServerUrl(_serverUrl)
+ .connectionTimeout(connectTimeout)
+ .discoveryEnabled(false)
+ .loadbalancerEnabled(false)
+ .maxConnectionSize(maxConnection)
+ .retryEnabled(false)
+ .readTimeout(readTimeout)
+ .setAuthenticationStrategy(authenticationStrategy)
+ .setAuthTokenKey(tokenKey)
+ .setAuthTokenValue(tokenValue)
+ .build()
+
+ val dwsClientConfig = new DWSClientConfig(clientConfig)
+ dwsClientConfig.setDWSVersion(Configuration.LINKIS_WEB_VERSION.getValue)
+ dwsClientConfig
+ }
+
override def downloadResource(user:String, resourceID: String): BmlDownloadResponse = {
downloadResource(user, resourceID, "")
}
diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-common/pom.xml b/linkis-public-enhancements/linkis-bml/linkis-bml-common/pom.xml
index a84cb4d..65e7967 100644
--- a/linkis-public-enhancements/linkis-bml/linkis-bml-common/pom.xml
+++ b/linkis-public-enhancements/linkis-bml/linkis-bml-common/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-engine-hook/pom.xml b/linkis-public-enhancements/linkis-bml/linkis-bml-engine-hook/pom.xml
index 72882b9..b9e0abf 100644
--- a/linkis-public-enhancements/linkis-bml/linkis-bml-engine-hook/pom.xml
+++ b/linkis-public-enhancements/linkis-bml/linkis-bml-engine-hook/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-engine-hook/src/main/scala/com/webank/wedatasphere/linkis/bml/hook/BmlEnginePreExecuteHook.scala b/linkis-public-enhancements/linkis-bml/linkis-bml-engine-hook/src/main/scala/com/webank/wedatasphere/linkis/bml/hook/BmlEnginePreExecuteHook.scala
index c3661aa..16db9b1 100644
--- a/linkis-public-enhancements/linkis-bml/linkis-bml-engine-hook/src/main/scala/com/webank/wedatasphere/linkis/bml/hook/BmlEnginePreExecuteHook.scala
+++ b/linkis-public-enhancements/linkis-bml/linkis-bml-engine-hook/src/main/scala/com/webank/wedatasphere/linkis/bml/hook/BmlEnginePreExecuteHook.scala
@@ -26,13 +26,18 @@
import com.webank.wedatasphere.linkis.engineconn.common.creation.EngineCreationContext
import com.webank.wedatasphere.linkis.engineconn.computation.executor.execute.EngineExecutionContext
import com.webank.wedatasphere.linkis.engineconn.computation.executor.hook.ComputationExecutorHook
+import com.webank.wedatasphere.linkis.engineconn.computation.executor.utlis.ComputationEngineConstant
import com.webank.wedatasphere.linkis.engineconn.core.util.EngineConnUtils
import com.webank.wedatasphere.linkis.governance.common.utils.GovernanceConstant
import org.apache.commons.lang.StringUtils
+import scala.collection.JavaConversions.asScalaBuffer
+
class BmlEnginePreExecuteHook extends ComputationExecutorHook with Logging{
override def getHookName: String = "BmlEnginePreExecuteHook"
+ override def getOrder(): Int = ComputationEngineConstant.CS_HOOK_ORDER + 1
+
val processUser:String = System.getProperty("user.name")
val defaultUser:String = "hadoop"
@@ -47,11 +52,11 @@
override def beforeExecutorExecute(engineExecutionContext: EngineExecutionContext, engineCreationContext: EngineCreationContext, code: String): String = {
val props = engineExecutionContext.getProperties
- if (null != props && props.containsKey(GovernanceConstant.TASK_SOURCE_MAP_KEY)) {
+ if (null != props && props.containsKey(GovernanceConstant.TASK_RESOURCES_STR)) {
val workDir = BmlHookUtils.getCurrentWorkDir
val jobId = engineExecutionContext.getJobId
- props.get(GovernanceConstant.TASK_SOURCE_MAP_KEY) match {
- case resources: List[Object] =>
+ props.get(GovernanceConstant.TASK_RESOURCES_STR) match {
+ case resources: util.List[Object] =>
resources.foreach {
case resource: util.Map[String, Object] => val fileName = resource.get(GovernanceConstant.TASK_RESOURCE_FILE_NAME_STR).toString
val resourceId = resource.get(GovernanceConstant.TASK_RESOURCE_ID_STR).toString
diff --git a/linkis-public-enhancements/linkis-bml/linkis-bml-server/pom.xml b/linkis-public-enhancements/linkis-bml/linkis-bml-server/pom.xml
index 46b5a8c..9fb8649 100644
--- a/linkis-public-enhancements/linkis-bml/linkis-bml-server/pom.xml
+++ b/linkis-public-enhancements/linkis-bml/linkis-bml-server/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-bml/pom.xml b/linkis-public-enhancements/linkis-bml/pom.xml
index 2accac0..50dd858 100644
--- a/linkis-public-enhancements/linkis-bml/pom.xml
+++ b/linkis-public-enhancements/linkis-bml/pom.xml
@@ -5,7 +5,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-cache/pom.xml b/linkis-public-enhancements/linkis-context-service/linkis-cs-cache/pom.xml
index 99fc9f9..1c04399 100644
--- a/linkis-public-enhancements/linkis-context-service/linkis-cs-cache/pom.xml
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-cache/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-client/pom.xml b/linkis-public-enhancements/linkis-context-service/linkis-cs-client/pom.xml
index 23e4a26..e113258 100644
--- a/linkis-public-enhancements/linkis-context-service/linkis-cs-client/pom.xml
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-client/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-common/pom.xml b/linkis-public-enhancements/linkis-context-service/linkis-cs-common/pom.xml
index b37126c..a8a9fd4 100644
--- a/linkis-public-enhancements/linkis-context-service/linkis-cs-common/pom.xml
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-common/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-engine-support/pom.xml b/linkis-public-enhancements/linkis-context-service/linkis-cs-engine-support/pom.xml
index e5d84a6..aef11dc 100644
--- a/linkis-public-enhancements/linkis-context-service/linkis-cs-engine-support/pom.xml
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-engine-support/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-highavailable/pom.xml b/linkis-public-enhancements/linkis-context-service/linkis-cs-highavailable/pom.xml
index e454319..5598548 100644
--- a/linkis-public-enhancements/linkis-context-service/linkis-cs-highavailable/pom.xml
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-highavailable/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-listener/pom.xml b/linkis-public-enhancements/linkis-context-service/linkis-cs-listener/pom.xml
index 1281725..58138e4 100644
--- a/linkis-public-enhancements/linkis-context-service/linkis-cs-listener/pom.xml
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-listener/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
@@ -35,7 +35,7 @@
<dependency>
<groupId>com.webank.wedatasphere.linkis</groupId>
<artifactId>linkis-cs-common</artifactId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</dependency>
<dependency>
<groupId>junit</groupId>
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-persistence/pom.xml b/linkis-public-enhancements/linkis-context-service/linkis-cs-persistence/pom.xml
index 8960f4c..cf09fcd 100644
--- a/linkis-public-enhancements/linkis-context-service/linkis-cs-persistence/pom.xml
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-persistence/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-search/pom.xml b/linkis-public-enhancements/linkis-context-service/linkis-cs-search/pom.xml
index f2a20a5..9261683 100644
--- a/linkis-public-enhancements/linkis-context-service/linkis-cs-search/pom.xml
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-search/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/pom.xml b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/pom.xml
index 78d3980..08e61e3 100644
--- a/linkis-public-enhancements/linkis-context-service/linkis-cs-server/pom.xml
+++ b/linkis-public-enhancements/linkis-context-service/linkis-cs-server/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-context-service/pom.xml b/linkis-public-enhancements/linkis-context-service/pom.xml
index 7dffbee..abc35d5 100644
--- a/linkis-public-enhancements/linkis-context-service/pom.xml
+++ b/linkis-public-enhancements/linkis-context-service/pom.xml
@@ -5,7 +5,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-datasource/datasourcemanager/common/pom.xml b/linkis-public-enhancements/linkis-datasource/datasourcemanager/common/pom.xml
index b10ed56..ba1d2d2 100644
--- a/linkis-public-enhancements/linkis-datasource/datasourcemanager/common/pom.xml
+++ b/linkis-public-enhancements/linkis-datasource/datasourcemanager/common/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-datasource/datasourcemanager/server/pom.xml b/linkis-public-enhancements/linkis-datasource/datasourcemanager/server/pom.xml
index 29622d3..019135e 100644
--- a/linkis-public-enhancements/linkis-datasource/datasourcemanager/server/pom.xml
+++ b/linkis-public-enhancements/linkis-datasource/datasourcemanager/server/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-datasource/linkis-metadata/pom.xml b/linkis-public-enhancements/linkis-datasource/linkis-metadata/pom.xml
index 6addb5a..1546c28 100644
--- a/linkis-public-enhancements/linkis-datasource/linkis-metadata/pom.xml
+++ b/linkis-public-enhancements/linkis-datasource/linkis-metadata/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<packaging>jar</packaging>
diff --git a/linkis-public-enhancements/linkis-datasource/metadatamanager/common/pom.xml b/linkis-public-enhancements/linkis-datasource/metadatamanager/common/pom.xml
index 452d807..d4277e6 100644
--- a/linkis-public-enhancements/linkis-datasource/metadatamanager/common/pom.xml
+++ b/linkis-public-enhancements/linkis-datasource/metadatamanager/common/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-datasource/metadatamanager/server/pom.xml b/linkis-public-enhancements/linkis-datasource/metadatamanager/server/pom.xml
index 4dd2532..34ae784 100644
--- a/linkis-public-enhancements/linkis-datasource/metadatamanager/server/pom.xml
+++ b/linkis-public-enhancements/linkis-datasource/metadatamanager/server/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>linkis-metadatamanager-server</artifactId>
diff --git a/linkis-public-enhancements/linkis-datasource/metadatamanager/service/elasticsearch/pom.xml b/linkis-public-enhancements/linkis-datasource/metadatamanager/service/elasticsearch/pom.xml
index 6bebedc..01ad2b0 100644
--- a/linkis-public-enhancements/linkis-datasource/metadatamanager/service/elasticsearch/pom.xml
+++ b/linkis-public-enhancements/linkis-datasource/metadatamanager/service/elasticsearch/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-datasource/metadatamanager/service/hive/pom.xml b/linkis-public-enhancements/linkis-datasource/metadatamanager/service/hive/pom.xml
index 1aa439f..2327d65 100644
--- a/linkis-public-enhancements/linkis-datasource/metadatamanager/service/hive/pom.xml
+++ b/linkis-public-enhancements/linkis-datasource/metadatamanager/service/hive/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>linkis-metadatamanager-service-hive</artifactId>
diff --git a/linkis-public-enhancements/linkis-datasource/metadatamanager/service/mysql/pom.xml b/linkis-public-enhancements/linkis-datasource/metadatamanager/service/mysql/pom.xml
index 3603eec..d3d1d62 100644
--- a/linkis-public-enhancements/linkis-datasource/metadatamanager/service/mysql/pom.xml
+++ b/linkis-public-enhancements/linkis-datasource/metadatamanager/service/mysql/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-configuration/pom.xml b/linkis-public-enhancements/linkis-publicservice/linkis-configuration/pom.xml
index 8efc9f5..ce41e27 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-configuration/pom.xml
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-configuration/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<artifactId>linkis-configuration</artifactId>
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-configuration/src/main/java/com/webank/wedatasphere/linkis/configuration/restful/api/ConfigurationRestfulApi.java b/linkis-public-enhancements/linkis-publicservice/linkis-configuration/src/main/java/com/webank/wedatasphere/linkis/configuration/restful/api/ConfigurationRestfulApi.java
index e45d137..128d2b7 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-configuration/src/main/java/com/webank/wedatasphere/linkis/configuration/restful/api/ConfigurationRestfulApi.java
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-configuration/src/main/java/com/webank/wedatasphere/linkis/configuration/restful/api/ConfigurationRestfulApi.java
@@ -22,6 +22,7 @@
import com.webank.wedatasphere.linkis.configuration.service.CategoryService;
import com.webank.wedatasphere.linkis.configuration.service.ConfigurationService;
import com.webank.wedatasphere.linkis.configuration.util.ConfigurationConfiguration;
+import com.webank.wedatasphere.linkis.configuration.util.JsonNodeUtil;
import com.webank.wedatasphere.linkis.configuration.util.LabelEntityParser;
import com.webank.wedatasphere.linkis.manager.label.entity.engine.EngineTypeLabel;
import com.webank.wedatasphere.linkis.manager.label.entity.engine.UserCreatorLabel;
@@ -211,9 +212,10 @@
@POST
@Path("/saveFullTree")
- public Response saveFullTree(@Context HttpServletRequest req, JsonNode json) throws IOException {
+ public Response saveFullTree(@Context HttpServletRequest req, JsonNode json) throws IOException, ConfigurationException {
List fullTrees = mapper.readValue(json.get("fullTree"), List.class);
- String creator = json.get("creator").asText();
+ String creator = JsonNodeUtil.getStringValue(json.get("creator"));
+ String engineType = JsonNodeUtil.getStringValue(json.get("engineType"));
if(creator != null && (creator.equals("通用设置") || creator.equals("全局设置"))){
creator = "*";
}
@@ -229,7 +231,18 @@
configurationService.updateUserValue(setting, userLabelId, createList, updateList);
}
}
+ String engine = null;
+ String version = null;
+ if(engineType != null){
+ String[] tmpString = engineType.split("-");
+ if(tmpString.length != 2){
+ throw new ConfigurationException("The saved engine type parameter is incorrect, please send it in a fixed format, such as spark-2.4.3(保存的引擎类型参数有误,请按照固定格式传送,例如spark-2.4.3)");
+ }
+ engine = tmpString[0];
+ version = tmpString[1];
+ }
configurationService.updateUserValue(createList, updateList);
+ configurationService.clearAMCacheConf(username,creator,engine,version);
Message message = Message.ok();
return Message.messageToResponse(message);
}
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-configuration/src/main/scala/com/webank/wedatasphere/linkis/configuration/conf/Configuration.scala b/linkis-public-enhancements/linkis-publicservice/linkis-configuration/src/main/scala/com/webank/wedatasphere/linkis/configuration/conf/Configuration.scala
index 062fb91..8cdec78 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-configuration/src/main/scala/com/webank/wedatasphere/linkis/configuration/conf/Configuration.scala
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-configuration/src/main/scala/com/webank/wedatasphere/linkis/configuration/conf/Configuration.scala
@@ -25,4 +25,5 @@
val ENGINE_TYPE = CommonVars.apply("wds.linkis.configuration.engine.type", EngineType.getAllEngineTypes.asScala.mkString(","))
+ val MANAGER_SPRING_NAME = CommonVars("wds.linkis.engineconn.manager.name", "linkis-cg-linkismanager")
}
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-configuration/src/main/scala/com/webank/wedatasphere/linkis/configuration/service/ConfigurationService.scala b/linkis-public-enhancements/linkis-publicservice/linkis-configuration/src/main/scala/com/webank/wedatasphere/linkis/configuration/service/ConfigurationService.scala
index 1a8703f..a095ea2 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-configuration/src/main/scala/com/webank/wedatasphere/linkis/configuration/service/ConfigurationService.scala
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-configuration/src/main/scala/com/webank/wedatasphere/linkis/configuration/service/ConfigurationService.scala
@@ -27,12 +27,14 @@
import com.webank.wedatasphere.linkis.configuration.util.{LabelEntityParser, LabelParameterParser}
import com.webank.wedatasphere.linkis.configuration.validate.ValidatorManager
import com.webank.wedatasphere.linkis.governance.common.protocol.conf.ResponseQueryConfig
+import com.webank.wedatasphere.linkis.manager.common.protocol.conf.RemoveCacheConfRequest
import com.webank.wedatasphere.linkis.manager.label.builder.CombinedLabelBuilder
import com.webank.wedatasphere.linkis.manager.label.builder.factory.LabelBuilderFactoryContext
import com.webank.wedatasphere.linkis.manager.label.constant.{LabelConstant, LabelKeyConstant}
import com.webank.wedatasphere.linkis.manager.label.entity.engine.{EngineTypeLabel, UserCreatorLabel}
import com.webank.wedatasphere.linkis.manager.label.entity.{CombinedLabel, CombinedLabelImpl, Label, SerializableLabel}
-import com.webank.wedatasphere.linkis.manager.label.utils.LabelUtils
+import com.webank.wedatasphere.linkis.manager.label.utils.{EngineTypeLabelCreator, LabelUtils}
+import com.webank.wedatasphere.linkis.rpc.Sender
import org.apache.commons.lang.StringUtils
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
@@ -141,6 +143,23 @@
}
}
+ def clearAMCacheConf(username: String, creator: String, engine: String, version: String): Unit = {
+ val sender = Sender.getSender(Configuration.MANAGER_SPRING_NAME.getValue)
+ if(StringUtils.isNotEmpty(username)){
+ val userCreatorLabel = LabelBuilderFactoryContext.getLabelBuilderFactory.createLabel(classOf[UserCreatorLabel])
+ userCreatorLabel.setUser(username)
+ userCreatorLabel.setCreator(creator)
+ val request = new RemoveCacheConfRequest
+ request.setUserCreatorLabel(userCreatorLabel)
+ if(StringUtils.isNotEmpty(engine) && StringUtils.isNotEmpty(version)){
+ val engineTypeLabel = EngineTypeLabelCreator.createEngineTypeLabel(engine)
+ engineTypeLabel.setVersion(version)
+ request.setEngineTypeLabel(engineTypeLabel)
+ }
+ sender.ask(request)
+ }
+ }
+
def updateUserValue(setting: ConfigKeyValue, userLabelId: Integer,
createList: util.List[ConfigValue], updateList: util.List[ConfigValue]) = {
paramCheck(setting)
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-configuration/src/main/scala/com/webank/wedatasphere/linkis/configuration/util/JsonNodeUtil.scala b/linkis-public-enhancements/linkis-publicservice/linkis-configuration/src/main/scala/com/webank/wedatasphere/linkis/configuration/util/JsonNodeUtil.scala
new file mode 100644
index 0000000..7e2f118
--- /dev/null
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-configuration/src/main/scala/com/webank/wedatasphere/linkis/configuration/util/JsonNodeUtil.scala
@@ -0,0 +1,15 @@
+package com.webank.wedatasphere.linkis.configuration.util
+
+import org.codehaus.jackson.JsonNode
+
+object JsonNodeUtil {
+
+ def getStringValue(node: JsonNode): String = {
+ if(node == null || node.asText().toLowerCase().equals("null")){
+ null
+ }else{
+ node.asText()
+ }
+ }
+
+}
\ No newline at end of file
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-configuration/src/main/scala/com/webank/wedatasphere/linkis/configuration/validate/JsonValidator.scala b/linkis-public-enhancements/linkis-publicservice/linkis-configuration/src/main/scala/com/webank/wedatasphere/linkis/configuration/validate/JsonValidator.scala
new file mode 100644
index 0000000..a7a2d87
--- /dev/null
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-configuration/src/main/scala/com/webank/wedatasphere/linkis/configuration/validate/JsonValidator.scala
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2019 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.linkis.configuration.validate
+
+import com.webank.wedatasphere.linkis.common.utils.Utils
+import com.webank.wedatasphere.linkis.server.BDPJettyServerHelper
+
+class JsonValidator extends Validator{
+
+ override def validate(value: String, range: String): Boolean = {
+ Utils.tryCatch{
+ BDPJettyServerHelper.gson.fromJson(value,classOf[java.util.HashMap[String,String]])
+ true
+ }{
+ case _ => false
+ }
+ }
+
+ override var kind: String = "Json"
+}
\ No newline at end of file
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-error-code/linkis-error-code-client/pom.xml b/linkis-public-enhancements/linkis-publicservice/linkis-error-code/linkis-error-code-client/pom.xml
index 4bbf30a..190d763 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-error-code/linkis-error-code-client/pom.xml
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-error-code/linkis-error-code-client/pom.xml
@@ -17,7 +17,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-error-code/linkis-error-code-common/pom.xml b/linkis-public-enhancements/linkis-publicservice/linkis-error-code/linkis-error-code-common/pom.xml
index 89bd970..fc7ac2e 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-error-code/linkis-error-code-common/pom.xml
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-error-code/linkis-error-code-common/pom.xml
@@ -17,7 +17,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-error-code/linkis-error-code-server/pom.xml b/linkis-public-enhancements/linkis-publicservice/linkis-error-code/linkis-error-code-server/pom.xml
index 2b0d372..77d96bc 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-error-code/linkis-error-code-server/pom.xml
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-error-code/linkis-error-code-server/pom.xml
@@ -17,7 +17,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-error-code/pom.xml b/linkis-public-enhancements/linkis-publicservice/linkis-error-code/pom.xml
index 3b3672e..06100b6 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-error-code/pom.xml
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-error-code/pom.xml
@@ -17,7 +17,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-client/pom.xml b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-client/pom.xml
index adf72f1..82b9b5c 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-client/pom.xml
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-client/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-client/src/main/scala/com/webank/wedatasphere/linkis/instance/label/client/EurekaInstanceLabelClient.scala b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-client/src/main/scala/com/webank/wedatasphere/linkis/instance/label/client/EurekaInstanceLabelClient.scala
index 73b10a6..9a313b1 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-client/src/main/scala/com/webank/wedatasphere/linkis/instance/label/client/EurekaInstanceLabelClient.scala
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-client/src/main/scala/com/webank/wedatasphere/linkis/instance/label/client/EurekaInstanceLabelClient.scala
@@ -47,12 +47,13 @@
}
}
- @EventListener
+ @EventListener(classes = Array(classOf[ContextClosedEvent]))
def shutdown(contextClosedEvent: ContextClosedEvent): Unit = {
info("To remove labels for instance")
val insLabelRemoveRequest = new InsLabelRemoveRequest
insLabelRemoveRequest.setServiceInstance(Sender.getThisServiceInstance)
InstanceLabelClient.getInstance.removeLabelsFromInstance(insLabelRemoveRequest)
+ info("success to send clear label rpc request")
}
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/pom.xml b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/pom.xml
index 717f61f..5661ae8 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/pom.xml
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/dao/InstanceInfoDao.java b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/dao/InstanceInfoDao.java
index 7825bd4..4a3d242 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/dao/InstanceInfoDao.java
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/dao/InstanceInfoDao.java
@@ -16,6 +16,7 @@
package com.webank.wedatasphere.linkis.instance.label.dao;
+import com.webank.wedatasphere.linkis.common.ServiceInstance;
import com.webank.wedatasphere.linkis.instance.label.entity.InstanceInfo;
@@ -26,4 +27,10 @@
* @param instanceInfo instance information
*/
void insertOne(InstanceInfo instanceInfo);
+
+ void removeInstance(ServiceInstance instance);
+
+ void updateInstance(InstanceInfo instanceInfo);
+
+ InstanceInfo getInstanceInfoByServiceInstance(ServiceInstance serviceInstance);
}
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/dao/impl/InstanceInfoMapper.xml b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/dao/impl/InstanceInfoMapper.xml
index 5051c24..0899515 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/dao/impl/InstanceInfoMapper.xml
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/dao/impl/InstanceInfoMapper.xml
@@ -17,9 +17,35 @@
<!DOCTYPE mapper PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN" "http://mybatis.org/dtd/mybatis-3-mapper.dtd" >
<mapper namespace="com.webank.wedatasphere.linkis.instance.label.dao.InstanceInfoDao">
+
+ <resultMap id="instanceInfoMap" type="InstanceInfo">
+ <result property="id" column="id"/>
+ <result property="instance" column="instance"/>
+ <result property="applicationName" column="name"/>
+ <result property="updateTime" column="update_time"/>
+ <result property="createTime" column="create_time"/>
+ </resultMap>
+
<insert id="insertOne" parameterType="InstanceInfo">
<![CDATA[INSERT INTO `linkis_ps_instance_info`(`instance`, `name`)
VALUES(#{instance}, #{applicationName})]]>
</insert>
+ <delete id="removeInstance" parameterType="com.webank.wedatasphere.linkis.common.ServiceInstance">
+ DELETE FROM linkis_ps_instance_info WHERE instance = #{instance}
+ </delete>
+
+ <update id="updateInstance" parameterType="com.webank.wedatasphere.linkis.instance.label.entity.InstanceInfo">
+ UPDATE linkis_ps_instance_info
+ <set>
+ <if test="applicationName != null">name = #{applicationName},</if>
+ <if test="instance != null">instance = #{instance},</if>
+ <if test="updateTime != null">update_time = #{updateTime},</if>
+ </set>
+ WHERE id = #{id}
+ </update>
+
+ <select id="getInstanceInfoByServiceInstance" parameterType="com.webank.wedatasphere.linkis.common.ServiceInstance" resultMap="instanceInfoMap">
+ <![CDATA[SELECT * FROM linkis_ps_instance_info
+ WHERE name = #{applicationName} AND instance = #{instance}]]>
+ </select>
+
</mapper>
\ No newline at end of file
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/restful/InstanceRestful.java b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/restful/InstanceRestful.java
index 7c79c4b..f154081 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/restful/InstanceRestful.java
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/restful/InstanceRestful.java
@@ -108,7 +108,10 @@
throw new Exception("Failed to update label, include repeat label(更新label失败,包含重复label)");
}
insLabelService.refreshLabelsToInstance(labels,instance);
- return Message.messageToResponse(Message.ok("更新Label成功").data("labels",labels));
+ InstanceInfo instanceInfo = insLabelService.getInstanceInfoByServiceInstance(instance);
+ instanceInfo.setUpdateTime(new Date());
+ insLabelService.updateInstance(instanceInfo);
+ return Message.messageToResponse(Message.ok("success").data("labels",labels));
}
@GET
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/service/InsLabelAccessService.java b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/service/InsLabelAccessService.java
index 582aec7..3a61a34 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/service/InsLabelAccessService.java
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/service/InsLabelAccessService.java
@@ -84,5 +84,9 @@
*/
List<InstanceInfo> listAllInstanceWithLabel();
+ void removeInstance(ServiceInstance serviceInstance);
+ void updateInstance(InstanceInfo instanceInfo);
+
+ InstanceInfo getInstanceInfoByServiceInstance(ServiceInstance serviceInstance);
}
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/service/InsLabelService.java b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/service/InsLabelService.java
index 0a272f9..ea45f38 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/service/InsLabelService.java
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/service/InsLabelService.java
@@ -77,8 +77,7 @@
@Cacheable({"instance"})
List<ServiceInstance> searchLabelRelatedInstances(ServiceInstance serviceInstance);
-
-
+ void removeInstance(ServiceInstance serviceInstance);
@CacheEvict(cacheNames = {"instance", "label"}, allEntries = true)
void evictCache();
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/service/impl/DefaultInsLabelService.java b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/service/impl/DefaultInsLabelService.java
index 8160974..11adaa9 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/service/impl/DefaultInsLabelService.java
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/service/impl/DefaultInsLabelService.java
@@ -43,6 +43,7 @@
import org.slf4j.LoggerFactory;
import org.springframework.aop.framework.AopContext;
import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.EnableAspectJAutoProxy;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@@ -59,6 +60,7 @@
import static org.apache.commons.lang.math.NumberUtils.isNumber;
@AdapterMode
+@EnableAspectJAutoProxy(proxyTargetClass = true, exposeProxy = true)
@Service
public class DefaultInsLabelService implements InsLabelAccessService {
@@ -70,6 +72,8 @@
private InstanceInfoDao instanceDao;
@Autowired
private InsLabelRelationDao insLabelRelationDao;
+ @Autowired
+ private InstanceInfoDao instanceInfoDao;
private AsyncConsumerQueue<InsPersistenceLabel> asyncRemoveLabelQueue;
private InsLabelAccessService selfService;
private AtomicBoolean asyncQueueInit = new AtomicBoolean(false);
@@ -109,7 +113,7 @@
LOG.info("Insert/Update service instance info: [" + serviceInstance + "]");
doInsertInstance(serviceInstance);
List<Integer> insLabelIds = insLabels.stream().map(InsPersistenceLabel :: getId).collect(Collectors.toList());
- LOG.trace("Build relation between labels: " +
+ LOG.info("Build relation between labels: " +
LabelUtils.Jackson.toJson(insLabelIds, null)
+ " and instance: [" + serviceInstance.getInstance() + "]");
batchOperation(insLabelIds, subInsLabelIds -> insLabelRelationDao.insertRelations(serviceInstance.getInstance(), subInsLabelIds), InsLabelConf.DB_PERSIST_BATCH_SIZE.getValue());
@@ -149,16 +153,16 @@
LOG.info("Drop relationships related by instance: [" + serviceInstance.getInstance() + "]");
insLabelRelationDao.dropRelationsByInstance(serviceInstance.getInstance());
// Async to delete labels that have no relationship
- if(!labelsCandidateRemoved.isEmpty()){
- labelsCandidateRemoved.forEach( label -> {
- if(!asyncQueueInit.get()) {
- initQueue();
- }
- if(!asyncRemoveLabelQueue.offer(label)){
- LOG.warn("Async queue for removing labels maybe full. current size: [" + asyncRemoveLabelQueue.size() + "]");
- }
- });
- }
+// if(!labelsCandidateRemoved.isEmpty()){
+// labelsCandidateRemoved.forEach( label -> {
+// if(!asyncQueueInit.get()) {
+// initQueue();
+// }
+// if(!asyncRemoveLabelQueue.offer(label)){
+// LOG.warn("Async queue for removing labels maybe full. current size: [" + asyncRemoveLabelQueue.size() + "]");
+// }
+// });
+// }
}
@@ -238,6 +242,22 @@
return instances;
}
+ @Override
+ public void removeInstance(ServiceInstance serviceInstance) {
+ instanceInfoDao.removeInstance(serviceInstance);
+ }
+
+ @Override
+ public InstanceInfo getInstanceInfoByServiceInstance(ServiceInstance serviceInstance){
+ InstanceInfo instanceInfo = instanceInfoDao.getInstanceInfoByServiceInstance(serviceInstance);
+ return instanceInfo;
+ }
+
+ @Override
+ public void updateInstance(InstanceInfo instanceInfo){
+ instanceInfoDao.updateInstance(instanceInfo);
+ }
+
public String getEurekaURL() throws Exception {
String eurekaURL = InsLabelConf.EUREKA_IPADDRESS.getValue();
String realURL = eurekaURL;
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/service/impl/DefaultInsLabelServiceAdapter.java b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/service/impl/DefaultInsLabelServiceAdapter.java
index a8ab79c..85da445 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/service/impl/DefaultInsLabelServiceAdapter.java
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/service/impl/DefaultInsLabelServiceAdapter.java
@@ -115,6 +115,14 @@
}
@Override
+ public void removeInstance(ServiceInstance serviceInstance) {
+ execOnServiceChain("removeInstance", insLabelAccessService -> {
+ insLabelAccessService.removeInstance(serviceInstance);
+ return true;
+ }, true, true);
+ }
+
+ @Override
public void evictCache() {
//Empty
}
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/service/impl/EurekaInsLabelService.java b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/service/impl/EurekaInsLabelService.java
index 1431716..7ca10e7 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/service/impl/EurekaInsLabelService.java
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/java/com/webank/wedatasphere/linkis/instance/label/service/impl/EurekaInsLabelService.java
@@ -84,4 +84,18 @@
return null;
}
+ @Override
+ public void removeInstance(ServiceInstance serviceInstance) {
+
+ }
+
+ @Override
+ public void updateInstance(InstanceInfo instanceInfo) {
+
+ }
+
+ @Override
+ public InstanceInfo getInstanceInfoByServiceInstance(ServiceInstance serviceInstance) {
+ return null;
+ }
}
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/scala/com/webank/wedatasphere/linkis/instance/label/service/rpc/DefaultInsLabelRpcService.scala b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/scala/com/webank/wedatasphere/linkis/instance/label/service/rpc/DefaultInsLabelRpcService.scala
index 97cdc33..647d6fd 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/scala/com/webank/wedatasphere/linkis/instance/label/service/rpc/DefaultInsLabelRpcService.scala
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-instance-label/linkis-instance-label-server/src/main/scala/com/webank/wedatasphere/linkis/instance/label/service/rpc/DefaultInsLabelRpcService.scala
@@ -88,7 +88,9 @@
)
info(s"Start to remove labels from instance[$instance]")
insLabelService.removeLabelsFromInstance(instance)
+ insLabelService.removeInstance(instance)
info(s"Success to remove labels from instance[$instance]")
+ info(s"Success to remove instance[$instance]")
}
@Receiver
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/pom.xml b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/pom.xml
index 7d73dcd..14bb624 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/pom.xml
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<artifactId>linkis-jobhistory</artifactId>
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/java/com/webank/wedatasphere/linkis/jobhistory/cache/impl/QueryCacheServiceImpl.java b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/java/com/webank/wedatasphere/linkis/jobhistory/cache/impl/QueryCacheServiceImpl.java
index 1b7978d..15af912 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/java/com/webank/wedatasphere/linkis/jobhistory/cache/impl/QueryCacheServiceImpl.java
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/java/com/webank/wedatasphere/linkis/jobhistory/cache/impl/QueryCacheServiceImpl.java
@@ -1,4 +1,4 @@
-/**
+package com.webank.wedatasphere.linkis.jobhistory.cache.impl; /**
package com.webank.wedatasphere.linkis.jobhistory.cache.impl;
import com.webank.wedatasphere.linkis.governance.common.entity.task.RequestPersistTask;
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/java/com/webank/wedatasphere/linkis/jobhistory/dao/impl/JobHistoryMapper.xml b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/java/com/webank/wedatasphere/linkis/jobhistory/dao/impl/JobHistoryMapper.xml
index 9253cb9..394db37 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/java/com/webank/wedatasphere/linkis/jobhistory/dao/impl/JobHistoryMapper.xml
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/java/com/webank/wedatasphere/linkis/jobhistory/dao/impl/JobHistoryMapper.xml
@@ -38,18 +38,19 @@
<result property = "instances" column = "instances" />
<result property = "metrics" column = "metrics" />
<result property = "engine_type" column = "engine_type" />
+ <result property = "execution_code" column = "execution_code" />
</resultMap>
<sql id="job_list">
`id`, `job_req_id`, `submit_user`, `execute_user`, `source`, `labels`, `params`,
`progress`, `status`, `log_path`, `error_code`, `error_desc`, `created_time`, `updated_time`,
- `instances`, `metrics`,`engine_type`
+ `instances`, `metrics`,`engine_type`, `execution_code`
</sql>
<sql id="insert_job_list">
`job_req_id`, `submit_user`, `execute_user`, `source`, `labels`, `params`,
- `progress`, `status`, `log_path`, `error_code`, `error_desc`, `created_time`, `updated_time`, `instances`, `metrics`,`engine_type`
+ `progress`, `status`, `log_path`, `error_code`, `error_desc`, `created_time`, `updated_time`, `instances`, `metrics`,`engine_type`, `execution_code`
</sql>
<insert id="insertJobHistory" flushCache="true" useGeneratedKeys="true" keyProperty="id" parameterType="com.webank.wedatasphere.linkis.jobhistory.entity.JobHistory">
@@ -57,7 +58,7 @@
VALUES (#{job_req_id},#{submit_user},#{execute_user},#{source},
#{labels},#{params},#{progress},
#{status},#{log_path},#{error_code},#{error_desc},
- #{created_time},#{updated_time},#{instances},#{metrics},#{engine_type})
+ #{created_time},#{updated_time},#{instances},#{metrics},#{engine_type},#{execution_code})
</insert>
<select id="selectJobHistory" useCache="false" resultMap="jobHistoryMap" parameterType="com.webank.wedatasphere.linkis.jobhistory.entity.JobHistory">
@@ -80,6 +81,7 @@
<if test="instances != null">and instances = #{instances}</if>
<!-- <if test="metrics != null">and metrics = #{metrics}</if>-->
<if test="engine_type != null">and engine_type = #{engine_type}</if>
+ <!-- <if test="execution_code != null">and execution_code = #{execution_code }</if>-->
</where>
</select>
@@ -116,6 +118,7 @@
<if test="instances != null">instances = #{instances},</if>
<if test="metrics != null">metrics = #{metrics},</if>
<if test="engine_type != null">engine_type = #{engine_type},</if>
+ <if test="execution_code != null">execution_code = #{execution_code},</if>
</trim>
WHERE id =#{id}
</update>
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/java/com/webank/wedatasphere/linkis/jobhistory/entity/JobHistory.java b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/java/com/webank/wedatasphere/linkis/jobhistory/entity/JobHistory.java
index 629ac05..48d4b78 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/java/com/webank/wedatasphere/linkis/jobhistory/entity/JobHistory.java
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/java/com/webank/wedatasphere/linkis/jobhistory/entity/JobHistory.java
@@ -54,6 +54,8 @@
private String engine_type;
+ private String execution_code;
+
public Long getId() {
return id;
}
@@ -189,4 +191,12 @@
public void setEngine_type(String engine_type) {
this.engine_type = engine_type;
}
+
+ public String getExecution_code() {
+ return execution_code;
+ }
+
+ public void setExecution_code(String execution_code) {
+ this.execution_code = execution_code;
+ }
}
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/java/com/webank/wedatasphere/linkis/jobhistory/restful/api/QueryRestfulApi.java b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/java/com/webank/wedatasphere/linkis/jobhistory/restful/api/QueryRestfulApi.java
index 36d2356..7163ce4 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/java/com/webank/wedatasphere/linkis/jobhistory/restful/api/QueryRestfulApi.java
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/java/com/webank/wedatasphere/linkis/jobhistory/restful/api/QueryRestfulApi.java
@@ -81,6 +81,10 @@
List<SubJobDetail> subJobDetails = TaskConversions.jobdetails2SubjobDetail(jobDetailMapper.selectJobDetailByJobHistoryId(jobId));
QueryTaskVO taskVO = TaskConversions.jobHistory2TaskVO(jobHistory, subJobDetails);
// todo check
+ if(taskVO == null){
+ return Message.messageToResponse(Message.error("The corresponding job was not found, or there may be no permission to view the job" +
+ "(没有找到对应的job,也可能是没有查看该job的权限)"));
+ }
for (SubJobDetail subjob : subJobDetails) {
if (!StringUtils.isEmpty(subjob.getResultLocation())) {
taskVO.setResultLocation(subjob.getResultLocation());
@@ -90,6 +94,9 @@
return Message.messageToResponse(Message.ok().data(TaskConstant.TASK, taskVO));
}
+ /**
+ * Method list should not contain subjob, which may cause performance problems.
+ */
@GET
@Path("/list")
public Response list(@Context HttpServletRequest req, @QueryParam("startDate") Long startDate,
@@ -139,16 +146,17 @@
long total = pageInfo.getTotal();
List<QueryTaskVO> vos = new ArrayList<>();
for (JobHistory jobHistory : list) {
- List<JobDetail> jobDetails = jobDetailMapper.selectJobDetailByJobHistoryId(jobHistory.getId());
- QueryTaskVO taskVO = TaskConversions.jobHistory2TaskVO(jobHistory, TaskConversions.jobdetails2SubjobDetail(jobDetails));
+ QueryUtils.exchangeExecutionCode(jobHistory);
+// List<JobDetail> jobDetails = jobDetailMapper.selectJobDetailByJobHistoryId(jobHistory.getId());
+ QueryTaskVO taskVO = TaskConversions.jobHistory2TaskVO(jobHistory, null);
vos.add(taskVO);
- // todo add first resultLocation to taskVO
+ /*// todo add first resultLocation to taskVO
for (JobDetail subjob : jobDetails) {
if (!StringUtils.isEmpty(subjob.getResult_location())) {
taskVO.setResultLocation(subjob.getResult_location());
}
break;
- }
+ }*/
}
return Message.messageToResponse(Message.ok().data(TaskConstant.TASKS, vos)
.data(JobRequestConstants.TOTAL_PAGE(), total));
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/java/com/webank/wedatasphere/linkis/jobhistory/transitional/TaskStatus.java b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/java/com/webank/wedatasphere/linkis/jobhistory/transitional/TaskStatus.java
index 52b31ac..32ef6d8 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/java/com/webank/wedatasphere/linkis/jobhistory/transitional/TaskStatus.java
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/java/com/webank/wedatasphere/linkis/jobhistory/transitional/TaskStatus.java
@@ -21,5 +21,13 @@
/*
This should be up-to-date with status in entrance job
*/
- Inited, WaitForRetry, Scheduled, Running, Succeed, Failed, Cancelled, Timeout
+ Inited, WaitForRetry, Scheduled, Running, Succeed, Failed, Cancelled, Timeout;
+
+ public static boolean isComplete(TaskStatus taskStatus){
+ if(taskStatus == Succeed || taskStatus == Failed || taskStatus == Cancelled || taskStatus == Timeout){
+ return true;
+ }else{
+ return false;
+ }
+ }
}
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/conf/JobhistoryConfiguration.scala b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/conf/JobhistoryConfiguration.scala
index 973a3da..5f581f3 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/conf/JobhistoryConfiguration.scala
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/conf/JobhistoryConfiguration.scala
@@ -19,6 +19,7 @@
import com.webank.wedatasphere.linkis.common.conf.CommonVars
object JobhistoryConfiguration {
+ //modify this param in linkis.properties
val GOVERNANCE_STATION_ADMIN = CommonVars("wds.linkis.governance.station.admin", "hadoop")
val JOB_HISTORY_SAFE_TRIGGER = CommonVars("wds.linkis.jobhistory.safe.trigger", true).getValue
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/conversions/TaskConversions.scala b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/conversions/TaskConversions.scala
index 7a3903a..6de35ad 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/conversions/TaskConversions.scala
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/conversions/TaskConversions.scala
@@ -54,6 +54,7 @@
task
}
+ /*@Deprecated
def queryTask2RequestPersistTask(queryTask: QueryTask): RequestPersistTask = {
QueryUtils.exchangeExecutionCode(queryTask)
val task = new RequestPersistTask
@@ -61,9 +62,9 @@
task.setSource(BDPJettyServerHelper.gson.fromJson(queryTask.getSourceJson, classOf[java.util.HashMap[String, String]]))
task.setParams(BDPJettyServerHelper.gson.fromJson(queryTask.getParamsJson, classOf[java.util.HashMap[String, Object]]))
task
- }
+ }*/
- @Deprecated
+ /*@Deprecated
def requestPersistTaskTask2QueryTask(requestPersistTask: RequestPersistTask): QueryTask = {
val task: QueryTask = new QueryTask
BeanUtils.copyProperties(requestPersistTask, task)
@@ -72,9 +73,9 @@
else
task.setParamsJson(null)
task
- }
+ }*/
- def queryTask2QueryTaskVO(queryTask: QueryTask): QueryTaskVO = {
+ /*def queryTask2QueryTaskVO(queryTask: QueryTask): QueryTaskVO = {
QueryUtils.exchangeExecutionCode(queryTask)
val taskVO = new QueryTaskVO
BeanUtils.copyProperties(queryTask, taskVO)
@@ -99,7 +100,7 @@
taskVO.setCostTime(System.currentTimeMillis() - queryTask.getCreatedTime().getTime());
}
taskVO
- }
+ }*/
def isJobFinished(status: String): Boolean = {
TaskStatus.Succeed.toString.equals(status) ||
@@ -137,6 +138,8 @@
jobReq.setUpdatedTime(job.getUpdated_time)
jobReq.setMetrics(BDPJettyServerHelper.gson.fromJson((job.getMetrics), classOf[util.Map[String, Object]]))
jobReq.setInstances(job.getInstances)
+ QueryUtils.exchangeExecutionCode(job)
+ jobReq.setExecutionCode(job.getExecution_code)
jobReq
}
@@ -166,6 +169,7 @@
if (null != jobReq.getMetrics) jobHistory.setMetrics(BDPJettyServerHelper.gson.toJson(jobReq.getMetrics))
val engineType = LabelUtil.getEngineType(jobReq.getLabels)
jobHistory.setEngine_type(engineType)
+ jobHistory.setExecution_code(jobReq.getExecutionCode)
jobHistory
}
@@ -270,10 +274,11 @@
val instances = job.getInstances().split(JobhistoryConfiguration.ENTRANCE_INSTANCE_DELEMITER.getValue)
taskVO.setStrongerExecId(ZuulEntranceUtils.generateExecID(job.getJob_req_id, entranceName, instances))
taskVO.setSourceJson(job.getSource)
- val code = new StringBuilder("")
- subjobs.asScala.foreach(job => code.append(job.getExecutionContent).append(";\n"))
- taskVO.setExecutionCode(code.toString())
- taskVO.setSubJobs(subjobs)
+ if (StringUtils.isNotBlank(job.getExecution_code)) {
+ taskVO.setExecutionCode(job.getExecution_code)
+ }
+ // Do not attach subjobs for performance
+ // taskVO.setSubJobs(subjobs)
taskVO.setSourceJson(job.getSource)
if (StringUtils.isNotBlank(job.getSource)) {
Utils.tryCatch {
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/service/JobHistoryDetailQueryService.java b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/service/JobHistoryDetailQueryService.java
index 6b92ce2..56ee94c 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/service/JobHistoryDetailQueryService.java
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/service/JobHistoryDetailQueryService.java
@@ -17,7 +17,7 @@
package com.webank.wedatasphere.linkis.jobhistory.service;
import com.webank.wedatasphere.linkis.governance.common.protocol.job.*;
-
+import java.util.ArrayList;
public interface JobHistoryDetailQueryService {
@@ -25,6 +25,8 @@
JobRespProtocol change(JobDetailReqUpdate jobReqUpdate);
+ ArrayList<JobRespProtocol> batchChange(JobDetailReqBatchUpdate jobReqUpdate);
+
JobRespProtocol query(JobDetailReqQuery jobDetailReqQuery);
// QueryJobHistory getJobHistoryByIdAndName(Long jobID, String userName);
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/service/JobHistoryQueryService.java b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/service/JobHistoryQueryService.java
index 5f76b72..e56704c 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/service/JobHistoryQueryService.java
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/service/JobHistoryQueryService.java
@@ -17,12 +17,10 @@
package com.webank.wedatasphere.linkis.jobhistory.service;
import com.webank.wedatasphere.linkis.governance.common.entity.job.JobRequest;
-import com.webank.wedatasphere.linkis.governance.common.protocol.job.JobReqInsert;
-import com.webank.wedatasphere.linkis.governance.common.protocol.job.JobReqQuery;
-import com.webank.wedatasphere.linkis.governance.common.protocol.job.JobReqUpdate;
-import com.webank.wedatasphere.linkis.governance.common.protocol.job.JobRespProtocol;
+import com.webank.wedatasphere.linkis.governance.common.protocol.job.*;
import com.webank.wedatasphere.linkis.jobhistory.entity.JobHistory;
+import java.util.ArrayList;
import java.util.Date;
import java.util.List;
@@ -33,6 +31,8 @@
JobRespProtocol change(JobReqUpdate jobReqUpdate);
+ ArrayList<JobRespProtocol> batchChange(JobReqBatchUpdate jobReqUpdate);
+
JobRespProtocol query(JobReqQuery jobReqQuery);
JobHistory getJobHistoryByIdAndName(Long jobID, String userName);
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/service/impl/JobHistoryDetailQueryServiceImpl.scala b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/service/impl/JobHistoryDetailQueryServiceImpl.scala
index f28d180..1568d6d 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/service/impl/JobHistoryDetailQueryServiceImpl.scala
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/service/impl/JobHistoryDetailQueryServiceImpl.scala
@@ -26,7 +26,7 @@
import java.util
import com.webank.wedatasphere.linkis.governance.common.constant.job.JobRequestConstants
-import com.webank.wedatasphere.linkis.governance.common.protocol.job.{JobDetailReqInsert, JobDetailReqQuery, JobDetailReqUpdate, JobRespProtocol}
+import com.webank.wedatasphere.linkis.governance.common.protocol.job.{JobDetailReqBatchUpdate, JobDetailReqInsert, JobDetailReqQuery, JobDetailReqUpdate, JobRespProtocol}
import com.webank.wedatasphere.linkis.jobhistory.conversions.TaskConversions._
import com.webank.wedatasphere.linkis.jobhistory.exception.QueryException
import com.webank.wedatasphere.linkis.jobhistory.transitional.TaskStatus
@@ -34,7 +34,7 @@
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
import org.springframework.transaction.annotation.Transactional
-
+import scala.collection.JavaConversions._
import scala.collection.JavaConverters.asScalaBufferConverter
@@ -51,9 +51,9 @@
@Receiver
override def add(jobReqInsert: JobDetailReqInsert): JobRespProtocol = {
info("Insert data into the database(往数据库中插入数据):job id : " + jobReqInsert.jobInfo.getJobReq().getId.toString)
- QueryUtils.storeExecutionCode(jobReqInsert.jobInfo)
val jobResp = new JobRespProtocol
Utils.tryCatch {
+ QueryUtils.storeExecutionCode(jobReqInsert.jobInfo.getSubJobDetail, jobReqInsert.jobInfo.getJobReq.getExecuteUser)
val jobInsert = subjobDetail2JobDetail(jobReqInsert.jobInfo.getSubJobDetail)
jobDetailMapper.insertJobDetail(jobInsert)
val map = new util.HashMap[String, Object]()
@@ -84,6 +84,7 @@
if (oldStatus != null && !shouldUpdate(oldStatus, jobDetail.getStatus))
throw new QueryException(s"${jobDetail.getId}数据库中的task状态为:${oldStatus}更新的task状态为:${jobDetail.getStatus}更新失败!")
}
+ jobDetail.setExecutionContent(null)
val jobUpdate = subjobDetail2JobDetail(jobDetail)
jobUpdate.setUpdated_time(new Timestamp(System.currentTimeMillis()))
jobDetailMapper.updateJobDetail(jobUpdate)
@@ -110,6 +111,53 @@
}
@Receiver
+ @Transactional
+ override def batchChange(jobReqUpdate: JobDetailReqBatchUpdate): util.ArrayList[JobRespProtocol] = {
+ val subJobInfoList = jobReqUpdate.jobInfo
+ val jobRespList = new util.ArrayList[JobRespProtocol]()
+ if(subJobInfoList != null){
+ subJobInfoList.foreach(subJobInfo => {
+ val jobDetail = subJobInfo.getSubJobDetail()
+ if (null != jobDetail && null != jobDetail.getId) {
+ info("Update data to the database(往数据库中更新数据):" + jobDetail.getId.toString)
+ }
+ val jobResp = new JobRespProtocol
+ Utils.tryCatch {
+ if (jobDetail.getStatus != null) {
+ val oldStatus: String = jobDetailMapper.selectJobDetailStatusForUpdateByJobDetailId(jobDetail.getId)
+ if (oldStatus != null && !shouldUpdate(oldStatus, jobDetail.getStatus))
+ throw new QueryException(s"${jobDetail.getId}数据库中的task状态为:${oldStatus}更新的task状态为:${jobDetail.getStatus}更新失败!")
+ }
+ jobDetail.setExecutionContent(null)
+ val jobUpdate = subjobDetail2JobDetail(jobDetail)
+ jobUpdate.setUpdated_time(new Timestamp(System.currentTimeMillis()))
+ jobDetailMapper.updateJobDetail(jobUpdate)
+
+ // todo
+ /*//updated by shanhuang to write cache
+ if (TaskStatus.Succeed.toString.equals(jobReq.getStatus) && queryCacheService.needCache(jobReq)) {
+ info("Write cache for task: " + jobReq.getId)
+ jobReq.setExecutionCode(executionCode)
+ queryCacheService.writeCache(jobReq)
+ }*/
+
+ val map = new util.HashMap[String, Object]
+ map.put(JobRequestConstants.JOB_ID, jobDetail.getId.asInstanceOf[Object])
+ jobResp.setStatus(0)
+ jobResp.setData(map)
+ } {
+ case e: Exception =>
+ error(e.getMessage)
+ jobResp.setStatus(1)
+ jobResp.setMsg(e.getMessage);
+ }
+ jobRespList.add(jobResp)
+ })
+ }
+ jobRespList
+ }
+
+ @Receiver
override def query(jobReqQuery: JobDetailReqQuery): JobRespProtocol = {
info("查询历史task:" + jobReqQuery.toString)
val jobResp = new JobRespProtocol
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/service/impl/JobHistoryQueryServiceImpl.scala b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/service/impl/JobHistoryQueryServiceImpl.scala
index 7bf4817..b9c3bdd 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/service/impl/JobHistoryQueryServiceImpl.scala
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/service/impl/JobHistoryQueryServiceImpl.scala
@@ -24,6 +24,7 @@
import com.webank.wedatasphere.linkis.jobhistory.conversions.TaskConversions._
import com.webank.wedatasphere.linkis.jobhistory.dao.{JobDetailMapper, JobHistoryMapper}
import com.webank.wedatasphere.linkis.jobhistory.entity.JobHistory
+import com.webank.wedatasphere.linkis.jobhistory.util.QueryUtils
import com.webank.wedatasphere.linkis.message.annotation.Receiver
import scala.collection.JavaConverters.asScalaBufferConverter
@@ -32,7 +33,7 @@
import com.webank.wedatasphere.linkis.governance.common.constant.job.JobRequestConstants
import com.webank.wedatasphere.linkis.governance.common.entity.job.{JobRequest, JobRequestWithDetail, SubJobDetail}
-import com.webank.wedatasphere.linkis.governance.common.protocol.job.{JobReqInsert, JobReqQuery, JobReqUpdate, JobRespProtocol}
+import com.webank.wedatasphere.linkis.governance.common.protocol.job.{JobReqBatchUpdate, JobReqInsert, JobReqQuery, JobReqUpdate, JobRespProtocol}
import com.webank.wedatasphere.linkis.jobhistory.entity.QueryJobHistory
import com.webank.wedatasphere.linkis.jobhistory.exception.QueryException
import com.webank.wedatasphere.linkis.jobhistory.service.JobHistoryQueryService
@@ -40,6 +41,7 @@
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
import org.springframework.transaction.annotation.Transactional
+import scala.collection.JavaConversions._
@Service
@@ -55,9 +57,9 @@
@Receiver
override def add(jobReqInsert: JobReqInsert): JobRespProtocol = {
info("Insert data into the database(往数据库中插入数据):" + jobReqInsert.toString)
-// QueryUtils.storeExecutionCode(jobReqInsert)
val jobResp = new JobRespProtocol
Utils.tryCatch {
+ QueryUtils.storeExecutionCode(jobReqInsert.jobReq)
val jobInsert = jobRequest2JobHistory(jobReqInsert.jobReq)
jobHistoryMapper.insertJobHistory(jobInsert)
val map = new util.HashMap[String, Object]()
@@ -78,7 +80,7 @@
override def change(jobReqUpdate: JobReqUpdate): JobRespProtocol = {
val jobReq = jobReqUpdate.jobReq
jobReq.setExecutionCode(null)
- info("Update data to the database(往数据库中更新数据):" + jobReq.toString)
+ info("Update data to the database(往数据库中更新数据):status:" + jobReq.toString)
val jobResp = new JobRespProtocol
Utils.tryCatch {
if (jobReq.getErrorDesc != null) {
@@ -119,6 +121,57 @@
}
@Receiver
+ @Transactional
+ override def batchChange(jobReqUpdate: JobReqBatchUpdate): util.ArrayList[JobRespProtocol] = {
+ val jobReqList = jobReqUpdate.jobReq
+ val jobRespList = new util.ArrayList[JobRespProtocol]()
+ if(jobReqList != null){
+ jobReqList.foreach(jobReq =>{
+ jobReq.setExecutionCode(null)
+ info("Update data to the database(往数据库中更新数据):status:" + jobReq.getStatus )
+ val jobResp = new JobRespProtocol
+ Utils.tryCatch {
+ if (jobReq.getErrorDesc != null) {
+ if (jobReq.getErrorDesc.length > 256) {
+ info(s"errorDesc is too long,we will cut some message")
+ jobReq.setErrorDesc(jobReq.getErrorDesc.substring(0, 256))
+ info(s"${jobReq.getErrorDesc}")
+ }
+ }
+ if (jobReq.getStatus != null) {
+ val oldStatus: String = jobHistoryMapper.selectJobHistoryStatusForUpdate(jobReq.getId)
+ if (oldStatus != null && !shouldUpdate(oldStatus, jobReq.getStatus))
+ throw new QueryException(s"${jobReq.getId}数据库中的task状态为:${oldStatus}更新的task状态为:${jobReq.getStatus}更新失败!")
+ }
+ val jobUpdate = jobRequest2JobHistory(jobReq)
+ jobUpdate.setUpdated_time(new Timestamp(System.currentTimeMillis()))
+ jobHistoryMapper.updateJobHistory(jobUpdate)
+
+ // todo
+ /*//updated by shanhuang to write cache
+ if (TaskStatus.Succeed.toString.equals(jobReq.getStatus) && queryCacheService.needCache(jobReq)) {
+ info("Write cache for task: " + jobReq.getId)
+ jobReq.setExecutionCode(executionCode)
+ queryCacheService.writeCache(jobReq)
+ }*/
+
+ val map = new util.HashMap[String, Object]
+ map.put(JobRequestConstants.JOB_ID, jobReq.getId.asInstanceOf[Object])
+ jobResp.setStatus(0)
+ jobResp.setData(map)
+ } {
+ case e: Exception =>
+ error(e.getMessage)
+ jobResp.setStatus(1)
+ jobResp.setMsg(e.getMessage);
+ }
+ jobRespList.add(jobResp)
+ })
+ }
+ jobRespList
+ }
+
+ @Receiver
override def query(jobReqQuery: JobReqQuery): JobRespProtocol = {
info("查询历史task:" + jobReqQuery.toString)
val jobResp = new JobRespProtocol
@@ -172,8 +225,13 @@
jobHistory2JobRequest(list)
}
- private def shouldUpdate(oldStatus: String, newStatus: String): Boolean = TaskStatus.valueOf(oldStatus).ordinal <= TaskStatus.valueOf(newStatus).ordinal
-
+ private def shouldUpdate(oldStatus: String, newStatus: String): Boolean = {
+ if(TaskStatus.valueOf(oldStatus) == TaskStatus.valueOf(newStatus)){
+ true
+ }else{
+ TaskStatus.valueOf(oldStatus).ordinal <= TaskStatus.valueOf(newStatus).ordinal && !TaskStatus.isComplete(TaskStatus.valueOf(oldStatus))
+ }
+ }
override def searchOne(jobId: lang.Long, sDate: Date, eDate: Date): JobHistory = {
Iterables.getFirst(
jobHistoryMapper.search(jobId, null, null, sDate, eDate, null),
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/util/QueryUtils.scala b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/util/QueryUtils.scala
index a30af77..b264783 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/util/QueryUtils.scala
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-jobhistory/src/main/scala/com/webank/wedatasphere/linkis/jobhistory/util/QueryUtils.scala
@@ -21,11 +21,11 @@
import com.webank.wedatasphere.linkis.common.conf.CommonVars
import com.webank.wedatasphere.linkis.common.io.FsPath
import com.webank.wedatasphere.linkis.common.utils.{Logging, Utils}
-import com.webank.wedatasphere.linkis.governance.common.entity.job.{SubJobDetail, SubJobInfo}
+import com.webank.wedatasphere.linkis.governance.common.entity.job.{JobRequest, SubJobDetail, SubJobInfo}
import com.webank.wedatasphere.linkis.governance.common.entity.task.RequestInsertTask
import com.webank.wedatasphere.linkis.governance.common.protocol.job.JobReqInsert
import com.webank.wedatasphere.linkis.jobhistory.conf.JobhistoryConfiguration
-import com.webank.wedatasphere.linkis.jobhistory.entity.QueryTask
+import com.webank.wedatasphere.linkis.jobhistory.entity.{JobHistory, QueryTask}
import com.webank.wedatasphere.linkis.storage.FSFactory
import com.webank.wedatasphere.linkis.storage.fs.FileSystem
import com.webank.wedatasphere.linkis.storage.utils.{FileSystemUtils, StorageUtils}
@@ -43,18 +43,23 @@
private val CODE_SPLIT = ";"
private val LENGTH_SPLIT = "#"
- def storeExecutionCode(jobReqInsert: SubJobInfo): Unit = {
- val jobReq = jobReqInsert.getJobReq
- val jobDetail = jobReqInsert.getSubJobDetail
- if (jobDetail.getExecutionContent.getBytes().length < CODE_STORE_LENGTH.getValue) return
- val user: String = jobReq.getSubmitUser
+ def storeExecutionCode(jobRequest: JobRequest): Unit = {
+ storeExecutionCode(jobRequest.getExecuteUser, jobRequest.getExecutionCode, path => jobRequest.setExecutionCode(path))
+ }
+
+ def storeExecutionCode(subJobDetail: SubJobDetail, user: String): Unit = {
+ storeExecutionCode(user, subJobDetail.getExecutionContent, path => subJobDetail.setExecutionContent(path))
+ }
+
+ def storeExecutionCode(user: String, code: String, pathCallback: String => Unit): Unit = {
+ if (null == code || code.getBytes().length < CODE_STORE_LENGTH.getValue) return
val path: String = getCodeStorePath(user)
val fsPath: FsPath = new FsPath(path)
val fileSystem = FSFactory.getFsByProxyUser(fsPath, user).asInstanceOf[FileSystem]
fileSystem.init(null)
var os: OutputStream = null
var position = 0L
- val codeBytes = jobDetail.getExecutionContent.getBytes(CHARSET)
+ val codeBytes = code.getBytes(CHARSET)
path.intern() synchronized {
Utils.tryFinally {
if (!fileSystem.exists(fsPath)) FileSystemUtils.createNewFile(fsPath, user, true)
@@ -67,13 +72,14 @@
}
}
val length = codeBytes.length
- jobDetail.setExecutionContent(path + CODE_SPLIT + position + LENGTH_SPLIT + length)
+ pathCallback(path + CODE_SPLIT + position + LENGTH_SPLIT + length)
}
- def exchangeExecutionCode(queryTask: QueryTask): Unit = {
+ // todo exchangeExecutionCode for subJobDetail
+ def exchangeExecutionCode(queryTask: JobHistory): Unit = {
import scala.util.control.Breaks._
- if (queryTask.getExecutionCode == null || !queryTask.getExecutionCode.startsWith(StorageUtils.HDFS_SCHEMA)) return
- val codePath = queryTask.getExecutionCode
+ if (queryTask.getExecution_code == null || !queryTask.getExecution_code.startsWith(StorageUtils.HDFS_SCHEMA)) return
+ val codePath = queryTask.getExecution_code
val path = codePath.substring(0, codePath.lastIndexOf(CODE_SPLIT))
val codeInfo = codePath.substring(codePath.lastIndexOf(CODE_SPLIT) + 1)
val infos: Array[String] = codeInfo.split(LENGTH_SPLIT)
@@ -82,7 +88,7 @@
val tub = new Array[Byte](1024)
val executionCode: StringBuilder = new StringBuilder
val fsPath: FsPath = new FsPath(path)
- val fileSystem = FSFactory.getFsByProxyUser(fsPath, queryTask.getUmUser).asInstanceOf[FileSystem]
+ val fileSystem = FSFactory.getFsByProxyUser(fsPath, queryTask.getExecute_user).asInstanceOf[FileSystem]
fileSystem.init(null)
var is: InputStream = null
if (!fileSystem.exists(fsPath)) return
@@ -102,7 +108,7 @@
IOUtils.closeQuietly(is)
if (fileSystem != null) Utils.tryAndWarn(fileSystem.close())
}
- queryTask.setExecutionCode(executionCode.toString())
+ queryTask.setExecution_code(executionCode.toString())
}
private def getCodeStorePath(user: String): String = {
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-script-dev/linkis-storage-script-dev-client/pom.xml b/linkis-public-enhancements/linkis-publicservice/linkis-script-dev/linkis-storage-script-dev-client/pom.xml
index 6a3b82c..09d1be4 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-script-dev/linkis-storage-script-dev-client/pom.xml
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-script-dev/linkis-storage-script-dev-client/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<artifactId>linkis-storage-script-dev-client</artifactId>
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-script-dev/linkis-storage-script-dev-server/pom.xml b/linkis-public-enhancements/linkis-publicservice/linkis-script-dev/linkis-storage-script-dev-server/pom.xml
index c1183c4..510a138 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-script-dev/linkis-storage-script-dev-server/pom.xml
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-script-dev/linkis-storage-script-dev-server/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<artifactId>linkis-storage-script-dev-server</artifactId>
<packaging>jar</packaging>
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-udf/linkis-udf-client/pom.xml b/linkis-public-enhancements/linkis-publicservice/linkis-udf/linkis-udf-client/pom.xml
index 834bd8e..e3cf6ee 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-udf/linkis-udf-client/pom.xml
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-udf/linkis-udf-client/pom.xml
@@ -18,7 +18,7 @@
<parent>
<groupId>com.webank.wedatasphere.linkis</groupId>
<artifactId>linkis</artifactId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-udf/linkis-udf-client/src/main/scala/com/webank/wedatasphere/linkis/udf/UDFClient.scala b/linkis-public-enhancements/linkis-publicservice/linkis-udf/linkis-udf-client/src/main/scala/com/webank/wedatasphere/linkis/udf/UDFClient.scala
index f44527d..1e59cc6 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-udf/linkis-udf-client/src/main/scala/com/webank/wedatasphere/linkis/udf/UDFClient.scala
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-udf/linkis-udf-client/src/main/scala/com/webank/wedatasphere/linkis/udf/UDFClient.scala
@@ -28,14 +28,14 @@
object UDFClient {
- def getUdfInfos(userName: String): mutable.ArrayBuffer[UDFInfo] = {
+ def getUdfInfos(userName: String, category: String ): mutable.ArrayBuffer[UDFInfo] = {
val udfInfoBuilder = new mutable.ArrayBuffer[UDFInfo]
- val udfTree = queryUdfRpc(userName)
- if (null != udfTree) extractUdfInfos(udfInfoBuilder, udfTree, userName)
+ val udfTree = queryUdfRpc(userName,category )
+ if (null != udfTree) extractUdfInfos(udfInfoBuilder, udfTree, userName, category)
udfInfoBuilder
}
- private def extractUdfInfos(udfInfoBuilder: mutable.ArrayBuffer[UDFInfo], udfTree: UDFTree, userName: String) : Unit = {
+ private def extractUdfInfos(udfInfoBuilder: mutable.ArrayBuffer[UDFInfo], udfTree: UDFTree, userName: String, category: String) : Unit = {
if(CollectionUtils.isNotEmpty(udfTree.getUdfInfos)){
udfTree.getUdfInfos.foreach{ udfInfo: UDFInfo =>
udfInfoBuilder.append(udfInfo)
@@ -45,18 +45,18 @@
udfTree.getChildrens.foreach{ child: UDFTree =>
var childInfo = child
if(ConstantVar.specialTypes.contains(child.getUserName)){
- childInfo = queryUdfRpc(userName, child.getId, child.getUserName)
+ childInfo = queryUdfRpc(userName, category, child.getId, child.getUserName)
} else {
- childInfo = queryUdfRpc(userName, child.getId, ConstantVar.SELF)
+ childInfo = queryUdfRpc(userName, category, child.getId, ConstantVar.SELF)
}
- if (null != childInfo) extractUdfInfos(udfInfoBuilder, childInfo, userName)
+ if (null != childInfo) extractUdfInfos(udfInfoBuilder, childInfo, userName, category)
}
}
}
- private def queryUdfRpc(userName: String, treeId: Long = -1, treeType: String = "self"): UDFTree = {
+ private def queryUdfRpc(userName: String, category: String , treeId: Long = -1, treeType: String = "self"): UDFTree = {
val udfTree = Sender.getSender(UDFClientConfiguration.UDF_SERVICE_NAME.getValue)
- .ask(RequestUdfTree(userName, treeType, treeId, "udf"))
+ .ask(RequestUdfTree(userName, treeType, treeId, category))
.asInstanceOf[ResponseUdfTree]
.udfTree
//info("got udf tree:" + new ObjectMapper().writer().withDefaultPrettyPrinter().writeValueAsString(udfTree))
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-udf/linkis-udf-common/pom.xml b/linkis-public-enhancements/linkis-publicservice/linkis-udf/linkis-udf-common/pom.xml
index cb14b9a..8478eea 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-udf/linkis-udf-common/pom.xml
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-udf/linkis-udf-common/pom.xml
@@ -18,7 +18,7 @@
<parent>
<groupId>com.webank.wedatasphere.linkis</groupId>
<artifactId>linkis</artifactId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-udf/linkis-udf-service/pom.xml b/linkis-public-enhancements/linkis-publicservice/linkis-udf/linkis-udf-service/pom.xml
index 7c6a963..bf30742 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-udf/linkis-udf-service/pom.xml
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-udf/linkis-udf-service/pom.xml
@@ -18,7 +18,7 @@
<parent>
<groupId>com.webank.wedatasphere.linkis</groupId>
<artifactId>linkis</artifactId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/linkis-publicservice/linkis-variable/pom.xml b/linkis-public-enhancements/linkis-publicservice/linkis-variable/pom.xml
index 79d76d0..f043756 100644
--- a/linkis-public-enhancements/linkis-publicservice/linkis-variable/pom.xml
+++ b/linkis-public-enhancements/linkis-publicservice/linkis-variable/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<artifactId>linkis-variable</artifactId>
diff --git a/linkis-public-enhancements/linkis-publicservice/pom.xml b/linkis-public-enhancements/linkis-publicservice/pom.xml
index 24d1b52..7f473fa 100644
--- a/linkis-public-enhancements/linkis-publicservice/pom.xml
+++ b/linkis-public-enhancements/linkis-publicservice/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-public-enhancements/pom.xml b/linkis-public-enhancements/pom.xml
index b79b173..5529192 100644
--- a/linkis-public-enhancements/pom.xml
+++ b/linkis-public-enhancements/pom.xml
@@ -5,7 +5,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-spring-cloud-services/linkis-service-discovery/linkis-eureka/pom.xml b/linkis-spring-cloud-services/linkis-service-discovery/linkis-eureka/pom.xml
index 1a726d6..bf5deff 100644
--- a/linkis-spring-cloud-services/linkis-service-discovery/linkis-eureka/pom.xml
+++ b/linkis-spring-cloud-services/linkis-service-discovery/linkis-eureka/pom.xml
@@ -18,7 +18,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<artifactId>linkis-eureka</artifactId>
diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/pom.xml b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/pom.xml
index 26ddb46..0b9fc32 100644
--- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/pom.xml
+++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/pom.xml
@@ -19,7 +19,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<artifactId>linkis-gateway-core</artifactId>
diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/com/webank/wedatasphere/linkis/gateway/security/UserRestful.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/com/webank/wedatasphere/linkis/gateway/security/UserRestful.scala
index fcb5943..273810f 100644
--- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/com/webank/wedatasphere/linkis/gateway/security/UserRestful.scala
+++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-core/src/main/scala/com/webank/wedatasphere/linkis/gateway/security/UserRestful.scala
@@ -100,7 +100,7 @@
case "login" =>
Utils.tryCatch {
val loginUser = GatewaySSOUtils.getLoginUsername(gatewayContext)
- Message.error(loginUser + "Already logged in, please log out before signing in(已经登录,请先退出再进行登录)!").data("redirectToIndex", true)
+ Message.ok(loginUser + "Already logged in, please log out before signing in(已经登录,请先退出再进行登录)!").data("userName", loginUser)
}(_ => login(gatewayContext))
case "logout" => logout(gatewayContext)
case "userInfo" => userInfo(gatewayContext)
@@ -423,4 +423,4 @@
}
-}
\ No newline at end of file
+}
diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/pom.xml b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/pom.xml
index 5508eda..81ba798 100644
--- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/pom.xml
+++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<artifactId>linkis-gateway-httpclient-support</artifactId>
diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/com/webank/wedatasphere/linkis/httpclient/dws/DWSHttpClient.scala b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/com/webank/wedatasphere/linkis/httpclient/dws/DWSHttpClient.scala
index 39a216d..e8208f3 100644
--- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/com/webank/wedatasphere/linkis/httpclient/dws/DWSHttpClient.scala
+++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-httpclient-support/src/main/scala/com/webank/wedatasphere/linkis/httpclient/dws/DWSHttpClient.scala
@@ -11,16 +11,13 @@
* limitations under the License.
*/
-/*
- * created by cooperyang on 2019/07/24.
- */
package com.webank.wedatasphere.linkis.httpclient.dws
import java.util
import com.webank.wedatasphere.linkis.common.io.{Fs, FsPath}
-import com.webank.wedatasphere.linkis.common.utils.JsonUtils
+import com.webank.wedatasphere.linkis.common.utils.{JsonUtils, Logging}
import com.webank.wedatasphere.linkis.httpclient.AbstractHttpClient
import com.webank.wedatasphere.linkis.httpclient.discovery.Discovery
import com.webank.wedatasphere.linkis.httpclient.dws.config.DWSClientConfig
@@ -37,7 +34,7 @@
import scala.collection.JavaConversions
class DWSHttpClient(clientConfig: DWSClientConfig, clientName: String)
- extends AbstractHttpClient(clientConfig, clientName) {
+ extends AbstractHttpClient(clientConfig, clientName) with Logging{
override protected def createDiscovery(): Discovery = new DWSGatewayDiscovery
@@ -51,9 +48,14 @@
}
override protected def httpResponseToResult(response: HttpResponse, requestAction: HttpAction, responseBody: String): Option[Result] = {
- var entity = response.getEntity
+ val entity = response.getEntity
val statusCode: Int = response.getStatusLine.getStatusCode
val url: String = requestAction.getURL
+
+ if (null == entity.getContentType && statusCode == 200) {
+ info("response is null, return success Result")
+ return Some(Result())
+ }
val contentType: String = entity.getContentType.getValue
DWSHttpMessageFactory.getDWSHttpMessageResult(url).map { case DWSHttpMessageResultInfo(_, clazz) =>
clazz match {
diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/pom.xml b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/pom.xml
index 170473f..177d65f 100644
--- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/pom.xml
+++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-gateway-server-support/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../../pom.xml</relativePath>
</parent>
diff --git a/linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/pom.xml b/linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/pom.xml
index f3dbc88..2c2dcf6 100644
--- a/linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/pom.xml
+++ b/linkis-spring-cloud-services/linkis-service-gateway/linkis-spring-cloud-gateway/pom.xml
@@ -20,7 +20,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../../pom.xml</relativePath>
</parent>
<artifactId>linkis-spring-cloud-gateway</artifactId>
diff --git a/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/pom.xml b/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/pom.xml
index 529888c..dc064a0 100644
--- a/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/pom.xml
+++ b/linkis-spring-cloud-services/linkis-service-gateway/plugins/linkis-gateway-datasource-ruler/pom.xml
@@ -5,7 +5,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-spring-cloud-services/linkis-service-gateway/pom.xml b/linkis-spring-cloud-services/linkis-service-gateway/pom.xml
index a79410b..924ebbe 100644
--- a/linkis-spring-cloud-services/linkis-service-gateway/pom.xml
+++ b/linkis-spring-cloud-services/linkis-service-gateway/pom.xml
@@ -21,7 +21,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<relativePath>../../pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/linkis-spring-cloud-services/pom.xml b/linkis-spring-cloud-services/pom.xml
index 9628909..bad3206 100644
--- a/linkis-spring-cloud-services/pom.xml
+++ b/linkis-spring-cloud-services/pom.xml
@@ -5,7 +5,7 @@
<parent>
<artifactId>linkis</artifactId>
<groupId>com.webank.wedatasphere.linkis</groupId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/pom.xml b/pom.xml
index eaf012b..bc8af19 100644
--- a/pom.xml
+++ b/pom.xml
@@ -19,7 +19,7 @@
<groupId>com.webank.wedatasphere.linkis</groupId>
<artifactId>linkis</artifactId>
- <version>1.0.0</version>
+ <version>1.0.1</version>
<packaging>pom</packaging>
<name>Linkis Project Parent POM</name>
@@ -69,7 +69,7 @@
</modules>
<properties>
- <linkis.version>1.0.0</linkis.version>
+ <linkis.version>1.0.1</linkis.version>
<hadoop.version>2.7.2</hadoop.version>
<spring.eureka.version>2.2.1.RELEASE</spring.eureka.version>
<spring.feign.version>2.2.1.RELEASE</spring.feign.version>
diff --git a/sbin/ext/linkis-cg-engineconnmanager b/sbin/ext/linkis-cg-engineconnmanager
deleted file mode 100644
index f4570b1..0000000
--- a/sbin/ext/linkis-cg-engineconnmanager
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/bin/bash
-#
-# description: ecm start cmd
-# @created: 01.16.2021
-#
-# Modified for Linkis 1.0.0
-
-# get log directory
-
-source $LINKIS_CONF_DIR/linkis-env.sh
-SERVER_SUFFIX="linkis-computation-governance/linkis-cg-engineconnmanager"
-## set log
-if [ "$LINKIS_LOG_DIR" = "" ]; then
- export LINKIS_LOG_DIR="$LINKIS_HOME/logs"
-fi
-export SERVER_LOG_PATH=$LINKIS_LOG_DIR/$SERVER_SUFFIX
-if [ ! -w "$SERVER_LOG_PATH" ] ; then
- mkdir -p "$SERVER_LOG_PATH"
-fi
-
-if test -z "$SERVER_HEAP_SIZE"
-then
- export SERVER_HEAP_SIZE="512M"
-fi
-
-if test -z "$SERVER_JAVA_OPTS"
-then
- export SERVER_JAVA_OPTS=" -Xmx$SERVER_HEAP_SIZE -XX:+UseG1GC -Xloggc:$SERVER_LOG_PATH/linkis-gc.log"
-fi
-
-
-export SERVER_CLASS=com.webank.wedatasphere.linkis.ecm.server.ECMApplication
-
-## conf dir
-export SERVER_CONF_PATH=$LINKIS_CONF_DIR:$LINKIS_CONF_DIR/$SERVER_SUFFIX
-
-## commons lib
-export LINKIS_COMMONS_LIB=$LINKIS_HOME/$LINKIS_PUBLIC_MODULE
-if [ ! -r "$LINKIS_COMMONS_LIB" ] ; then
- echo "linkis commons lib not exists $LINKIS_COMMONS_LIB"
- exit 1
-fi
-
-## server lib
-export SERVER_LIB=$LINKIS_HOME/lib/$SERVER_SUFFIX
-if [ ! -r "$SERVER_LIB" ] ; then
- echo "server lib not exists $SERVER_LIB"
- exit 1
-fi
-
-## set class path
-export SERVER_CLASS_PATH=$SERVER_CONF_PATH:$LINKIS_COMMONS_LIB/*:$SERVER_LIB/*
-
-## set spring args
-if [ "$ENGINECONNMANAGER_PORT" != "" ]; then
- SPRING_ARGS="--server.port=$ENGINECONNMANAGER_PORT"
-fi
-
-if [ "$EUREKA_URL" != "" ]; then
- SPRING_ARGS="$SPRING_ARGS --eureka.client.serviceUrl.defaultZone=$EUREKA_URL"
-fi
-
-nohup java $SERVER_JAVA_OPTS -cp $SERVER_CLASS_PATH $SERVER_CLASS $SPRING_ARGS 2>&1 > $SERVER_LOG_PATH/linkis.out &
-
-pid=$!
-sleep 2
-if [[ -z "${pid}" ]]; then
- echo "server $SERVER_NAME start failed!"
- exit 1
-else
- echo "server $SERVER_NAME start succeeded!"
- echo $pid > $SERVER_PID
-fi
diff --git a/sbin/ext/linkis-cg-engineplugin b/sbin/ext/linkis-cg-engineplugin
deleted file mode 100644
index b894e6b..0000000
--- a/sbin/ext/linkis-cg-engineplugin
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/bin/bash
-#
-# description: ecp start cmd
-# @created: 01.16.2021
-#
-# Modified for Linkis 1.0.0
-
-# get log directory
-
-source $LINKIS_CONF_DIR/linkis-env.sh
-SERVER_SUFFIX="linkis-computation-governance/linkis-cg-engineplugin"
-## set log
-if [ "$LINKIS_LOG_DIR" = "" ]; then
- export LINKIS_LOG_DIR="$LINKIS_HOME/logs"
-fi
-export SERVER_LOG_PATH=$LINKIS_LOG_DIR/$SERVER_SUFFIX
-if [ ! -w "$SERVER_LOG_PATH" ] ; then
- mkdir -p "$SERVER_LOG_PATH"
-fi
-
-if test -z "$SERVER_HEAP_SIZE"
-then
- export SERVER_HEAP_SIZE="512M"
-fi
-
-if test -z "$SERVER_JAVA_OPTS"
-then
- export SERVER_JAVA_OPTS=" -Xmx$SERVER_HEAP_SIZE -XX:+UseG1GC -Xloggc:$SERVER_LOG_PATH/linkis-gc.log"
-fi
-
-
-export SERVER_CLASS=com.webank.wedatasphere.linkis.engineplugin.server.LinkisEngineConnPluginServer
-
-## conf dir
-export SERVER_CONF_PATH=$LINKIS_CONF_DIR:$LINKIS_CONF_DIR/$SERVER_SUFFIX
-
-## commons lib
-export LINKIS_COMMONS_LIB=$LINKIS_HOME/$LINKIS_PUBLIC_MODULE
-if [ ! -r "$LINKIS_COMMONS_LIB" ] ; then
- echo "linkis commons lib not exists $LINKIS_COMMONS_LIB"
- exit 1
-fi
-
-## server lib
-export SERVER_LIB=$LINKIS_HOME/lib/$SERVER_SUFFIX
-if [ ! -r "$SERVER_LIB" ] ; then
- echo "server lib not exists $SERVER_LIB"
- exit 1
-fi
-
-## set class path
-export SERVER_CLASS_PATH=$SERVER_CONF_PATH:$LINKIS_COMMONS_LIB/*:$SERVER_LIB/*
-
-## set spring args
-if [ "$ENGINECONN_PLUGIN_SERVER_PORT" != "" ]; then
- SPRING_ARGS="--server.port=$ENGINECONN_PLUGIN_SERVER_PORT"
-fi
-
-if [ "$EUREKA_URL" != "" ]; then
- SPRING_ARGS="$SPRING_ARGS --eureka.client.serviceUrl.defaultZone=$EUREKA_URL"
-fi
-
-nohup java $SERVER_JAVA_OPTS -cp $SERVER_CLASS_PATH $SERVER_CLASS $SPRING_ARGS 2>&1 > $SERVER_LOG_PATH/linkis.out &
-
-pid=$!
-sleep 2
-if [[ -z "${pid}" ]]; then
- echo "server $SERVER_NAME start failed!"
- exit 1
-else
- echo "server $SERVER_NAME start succeeded!"
- echo $pid > $SERVER_PID
-fi
diff --git a/sbin/ext/linkis-cg-entrance b/sbin/ext/linkis-cg-entrance
deleted file mode 100644
index 2e034c4..0000000
--- a/sbin/ext/linkis-cg-entrance
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/bin/bash
-#
-# description: entrnace start cmd
-# @created: 01.16.2021
-#
-# Modified for Linkis 1.0.0
-
-# get log directory
-
-source $LINKIS_CONF_DIR/linkis-env.sh
-SERVER_SUFFIX="linkis-computation-governance/linkis-cg-entrance"
-## set log
-if [ "$LINKIS_LOG_DIR" = "" ]; then
- export LINKIS_LOG_DIR="$LINKIS_HOME/logs"
-fi
-export SERVER_LOG_PATH=$LINKIS_LOG_DIR/$SERVER_SUFFIX
-if [ ! -w "$SERVER_LOG_PATH" ] ; then
- mkdir -p "$SERVER_LOG_PATH"
-fi
-
-if test -z "$SERVER_HEAP_SIZE"
-then
- export SERVER_HEAP_SIZE="512M"
-fi
-
-if test -z "$SERVER_JAVA_OPTS"
-then
- export SERVER_JAVA_OPTS=" -Xmx$SERVER_HEAP_SIZE -XX:+UseG1GC -Xloggc:$SERVER_LOG_PATH/linkis-gc.log"
-fi
-
-
-export SERVER_CLASS=com.webank.wedatasphere.linkis.entrance.LinkisEntranceApplication
-
-## conf dir
-export SERVER_CONF_PATH=$LINKIS_CONF_DIR:$LINKIS_CONF_DIR/$SERVER_SUFFIX
-
-## commons lib
-export LINKIS_COMMONS_LIB=$LINKIS_HOME/$LINKIS_PUBLIC_MODULE
-if [ ! -r "$LINKIS_COMMONS_LIB" ] ; then
- echo "linkis commons lib not exists $LINKIS_COMMONS_LIB"
- exit 1
-fi
-
-## server lib
-export SERVER_LIB=$LINKIS_HOME/lib/$SERVER_SUFFIX
-if [ ! -r "$SERVER_LIB" ] ; then
- echo "server lib not exists $SERVER_LIB"
- exit 1
-fi
-
-## set class path
-export SERVER_CLASS_PATH=$SERVER_CONF_PATH:$LINKIS_COMMONS_LIB/*:$SERVER_LIB/*
-
-## set spring args
-if [ "$ENTRANCE_PORT" != "" ]; then
- SPRING_ARGS="--server.port=$ENTRANCE_PORT"
-fi
-
-if [ "$EUREKA_URL" != "" ]; then
- SPRING_ARGS="$SPRING_ARGS --eureka.client.serviceUrl.defaultZone=$EUREKA_URL"
-fi
-
-nohup java $SERVER_JAVA_OPTS -cp $SERVER_CLASS_PATH $SERVER_CLASS $SPRING_ARGS 2>&1 > $SERVER_LOG_PATH/linkis.out &
-
-pid=$!
-sleep 2
-if [[ -z "${pid}" ]]; then
- echo "server $SERVER_NAME start failed!"
- exit 1
-else
- echo "server $SERVER_NAME start succeeded!"
- echo $pid > $SERVER_PID
-fi
diff --git a/sbin/ext/linkis-cg-linkismanager b/sbin/ext/linkis-cg-linkismanager
deleted file mode 100644
index 19661c1..0000000
--- a/sbin/ext/linkis-cg-linkismanager
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/bin/bash
-#
-# description: manager start cmd
-# @created: 01.16.2021
-#
-# Modified for Linkis 1.0.0
-
-# get log directory
-
-source $LINKIS_CONF_DIR/linkis-env.sh
-SERVER_SUFFIX="linkis-computation-governance/linkis-cg-linkismanager"
-## set log
-if [ "$LINKIS_LOG_DIR" = "" ]; then
- export LINKIS_LOG_DIR="$LINKIS_HOME/logs"
-fi
-export SERVER_LOG_PATH=$LINKIS_LOG_DIR/$SERVER_SUFFIX
-if [ ! -w "$SERVER_LOG_PATH" ] ; then
- mkdir -p "$SERVER_LOG_PATH"
-fi
-
-if test -z "$SERVER_HEAP_SIZE"
-then
- export SERVER_HEAP_SIZE="512M"
-fi
-
-if test -z "$SERVER_JAVA_OPTS"
-then
- export SERVER_JAVA_OPTS=" -Xmx$SERVER_HEAP_SIZE -XX:+UseG1GC -Xloggc:$SERVER_LOG_PATH/linkis-gc.log"
-fi
-
-
-export SERVER_CLASS=com.webank.wedatasphere.linkis.manager.am.LinkisManagerApplication
-
-## conf dir
-export SERVER_CONF_PATH=$LINKIS_CONF_DIR:$LINKIS_CONF_DIR/$SERVER_SUFFIX
-
-## commons lib
-export LINKIS_COMMONS_LIB=$LINKIS_HOME/$LINKIS_PUBLIC_MODULE
-if [ ! -r "$LINKIS_COMMONS_LIB" ] ; then
- echo "linkis commons lib not exists $LINKIS_COMMONS_LIB"
- exit 1
-fi
-
-## server lib
-export SERVER_LIB=$LINKIS_HOME/lib/$SERVER_SUFFIX
-if [ ! -r "$SERVER_LIB" ] ; then
- echo "server lib not exists $SERVER_LIB"
- exit 1
-fi
-
-## set class path
-export SERVER_CLASS_PATH=$SERVER_CONF_PATH:$LINKIS_COMMONS_LIB/*:$SERVER_LIB/*
-
-## set spring args
-if [ "$MANAGER_PORT" != "" ]; then
- SPRING_ARGS="--server.port=$MANAGER_PORT"
-fi
-
-if [ "$EUREKA_URL" != "" ]; then
- SPRING_ARGS="$SPRING_ARGS --eureka.client.serviceUrl.defaultZone=$EUREKA_URL"
-fi
-
-nohup java $SERVER_JAVA_OPTS -cp $SERVER_CLASS_PATH $SERVER_CLASS $SPRING_ARGS 2>&1 > $SERVER_LOG_PATH/linkis.out &
-
-pid=$!
-sleep 2
-if [[ -z "${pid}" ]]; then
- echo "server $SERVER_NAME start failed!"
- exit 1
-else
- echo "server $SERVER_NAME start succeeded!"
- echo $pid > $SERVER_PID
-fi
diff --git a/sbin/ext/linkis-mg-eureka b/sbin/ext/linkis-mg-eureka
deleted file mode 100644
index 8c6244d..0000000
--- a/sbin/ext/linkis-mg-eureka
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/bin/bash
-#
-# description: eureka start cmd
-# @created: 01.16.2021
-#
-# Modified for Linkis 1.0.0
-
-# get log directory
-
-source $LINKIS_CONF_DIR/linkis-env.sh
-SERVER_SUFFIX="linkis-spring-cloud-services/linkis-mg-eureka"
-## set log
-if [ "$LINKIS_LOG_DIR" = "" ]; then
- export LINKIS_LOG_DIR="$LINKIS_HOME/logs"
-fi
-export SERVER_LOG_PATH=$LINKIS_LOG_DIR/$SERVER_SUFFIX
-if [ ! -w "$SERVER_LOG_PATH" ] ; then
- mkdir -p "$SERVER_LOG_PATH"
-fi
-
-export SERVER_HEAP_SIZE="1G"
-
-
-if test -z "$SERVER_JAVA_OPTS"
-then
- export SERVER_JAVA_OPTS=" -Xmx$SERVER_HEAP_SIZE -XX:+UseG1GC -Xloggc:$SERVER_LOG_PATH/linkis-gc.log"
-fi
-
-
-export SERVER_CLASS=com.webank.wedatasphere.linkis.eureka.SpringCloudEurekaApplication
-
-## conf dir
-export SERVER_CONF_PATH=$LINKIS_CONF_DIR:$LINKIS_CONF_DIR/$SERVER_SUFFIX
-
-
-## server lib
-export SERVER_LIB=$LINKIS_HOME/lib/$SERVER_SUFFIX
-if [ ! -r "$SERVER_LIB" ] ; then
- echo "server lib not exists $SERVER_LIB"
- exit 1
-fi
-
-## set class path
-export SERVER_CLASS_PATH=$SERVER_CONF_PATH:$SERVER_LIB/*
-
-## set spring args
-if [ "$EUREKA_PORT" != "" ]; then
- SPRING_ARGS="--server.port=$EUREKA_PORT"
-fi
-
-if [ "$EUREKA_URL" != "" ]; then
- SPRING_ARGS="$SPRING_ARGS --eureka.client.serviceUrl.defaultZone=$EUREKA_URL"
-fi
-
-if [ "$$EUREKA_PREFER_IP" == "true" ]; then
- SPRING_ARGS="$SPRING_ARGS --eureka.instance.preferIpAddress=true "
-else
- SPRING_ARGS="$SPRING_ARGS --eureka.instance.hostname=$EUREKA_HOSTNAME"
-fi
-
-SPRING_ARGS="$SPRING_ARGS --spring.profiles.active=eureka"
-nohup java $SERVER_JAVA_OPTS -cp $SERVER_CLASS_PATH $SERVER_CLASS $SPRING_ARGS 2>&1 > $SERVER_LOG_PATH/linkis.out &
-
-pid=$!
-sleep 2
-if [[ -z "${pid}" ]]; then
- echo "server $SERVER_NAME start failed!"
- exit 1
-else
- echo "server $SERVER_NAME start succeeded!"
- echo $pid > $SERVER_PID
-fi
diff --git a/sbin/ext/linkis-mg-gateway b/sbin/ext/linkis-mg-gateway
deleted file mode 100644
index baf6e39..0000000
--- a/sbin/ext/linkis-mg-gateway
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/bin/bash
-#
-# description: gateway start cmd
-# @created: 01.16.2021
-#
-# Modified for Linkis 1.0.0
-
-# get log directory
-
-source $LINKIS_CONF_DIR/linkis-env.sh
-SERVER_SUFFIX="linkis-spring-cloud-services/linkis-mg-gateway"
-## set log
-if [ "$LINKIS_LOG_DIR" = "" ]; then
- export LINKIS_LOG_DIR="$LINKIS_HOME/logs"
-fi
-export SERVER_LOG_PATH=$LINKIS_LOG_DIR/$SERVER_SUFFIX
-if [ ! -w "$SERVER_LOG_PATH" ] ; then
- mkdir -p "$SERVER_LOG_PATH"
-fi
-
-if test -z "$SERVER_HEAP_SIZE"
-then
- export SERVER_HEAP_SIZE="512M"
-fi
-
-if test -z "$SERVER_JAVA_OPTS"
-then
- export SERVER_JAVA_OPTS=" -Xmx$SERVER_HEAP_SIZE -XX:+UseG1GC -Xloggc:$SERVER_LOG_PATH/linkis-gc.log"
-fi
-
-
-export SERVER_CLASS=com.webank.wedatasphere.linkis.gateway.springcloud.LinkisGatewayApplication
-
-## conf dir
-export SERVER_CONF_PATH=$LINKIS_CONF_DIR:$LINKIS_CONF_DIR/$SERVER_SUFFIX
-
-## server lib
-export SERVER_LIB=$LINKIS_HOME/lib/$SERVER_SUFFIX
-if [ ! -r "$SERVER_LIB" ] ; then
- echo "server lib not exists $SERVER_LIB"
- exit 1
-fi
-
-## set class path
-export SERVER_CLASS_PATH=$SERVER_CONF_PATH:$SERVER_LIB/*
-
-## set spring args
-if [ "$GATEWAY_PORT" != "" ]; then
- SPRING_ARGS="--server.port=$GATEWAY_PORT"
-fi
-
-if [ "$EUREKA_URL" != "" ]; then
- SPRING_ARGS="$SPRING_ARGS --eureka.client.serviceUrl.defaultZone=$EUREKA_URL"
-fi
-
-nohup java $SERVER_JAVA_OPTS -cp $SERVER_CLASS_PATH $SERVER_CLASS $SPRING_ARGS 2>&1 > $SERVER_LOG_PATH/linkis.out &
-
-pid=$!
-sleep 2
-if [[ -z "${pid}" ]]; then
- echo "server $SERVER_NAME start failed!"
- exit 1
-else
- echo "server $SERVER_NAME start succeeded!"
- echo $pid > $SERVER_PID
-fi
diff --git a/sbin/ext/linkis-ps-bml b/sbin/ext/linkis-ps-bml
deleted file mode 100644
index e3f00ed..0000000
--- a/sbin/ext/linkis-ps-bml
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/bin/bash
-#
-# description: bml start cmd
-# @created: 01.16.2021
-#
-# Modified for Linkis 1.0.0
-
-# get log directory
-
-source $LINKIS_CONF_DIR/linkis-env.sh
-SERVER_SUFFIX="linkis-public-enhancements/linkis-ps-bml"
-## set log
-if [ "$LINKIS_LOG_DIR" = "" ]; then
- export LINKIS_LOG_DIR="$LINKIS_HOME/logs"
-fi
-export SERVER_LOG_PATH=$LINKIS_LOG_DIR/$SERVER_SUFFIX
-if [ ! -w "$SERVER_LOG_PATH" ] ; then
- mkdir -p "$SERVER_LOG_PATH"
-fi
-
-if test -z "$SERVER_HEAP_SIZE"
-then
- export SERVER_HEAP_SIZE="512M"
-fi
-
-if test -z "$SERVER_JAVA_OPTS"
-then
- export SERVER_JAVA_OPTS=" -Xmx$SERVER_HEAP_SIZE -XX:+UseG1GC -Xloggc:$SERVER_LOG_PATH/linkis-gc.log"
-fi
-
-
-export SERVER_CLASS=com.webank.wedatasphere.linkis.bml.LinkisBMLApplication
-
-## conf dir
-export SERVER_CONF_PATH=$LINKIS_CONF_DIR:$LINKIS_CONF_DIR/$SERVER_SUFFIX
-
-## commons lib
-export LINKIS_COMMONS_LIB=$LINKIS_HOME/$LINKIS_PUBLIC_MODULE
-if [ ! -r "$LINKIS_COMMONS_LIB" ] ; then
- echo "linkis commons lib not exists $LINKIS_COMMONS_LIB"
- exit 1
-fi
-
-## server lib
-export SERVER_LIB=$LINKIS_HOME/lib/$SERVER_SUFFIX
-if [ ! -r "$SERVER_LIB" ] ; then
- echo "server lib not exists $SERVER_LIB"
- exit 1
-fi
-
-## set class path
-export SERVER_CLASS_PATH=$SERVER_CONF_PATH:$LINKIS_COMMONS_LIB/*:$SERVER_LIB/*
-
-## set spring args
-if [ "$BML_PORT" != "" ]; then
- SPRING_ARGS="--server.port=$BML_PORT"
-fi
-
-if [ "$EUREKA_URL" != "" ]; then
- SPRING_ARGS="$SPRING_ARGS --eureka.client.serviceUrl.defaultZone=$EUREKA_URL"
-fi
-
-nohup java $SERVER_JAVA_OPTS -cp $SERVER_CLASS_PATH $SERVER_CLASS $SPRING_ARGS 2>&1 > $SERVER_LOG_PATH/linkis.out &
-
-pid=$!
-sleep 2
-if [[ -z "${pid}" ]]; then
- echo "server $SERVER_NAME start failed!"
- exit 1
-else
- echo "server $SERVER_NAME start succeeded!"
- echo $pid > $SERVER_PID
-fi
diff --git a/sbin/ext/linkis-ps-cs b/sbin/ext/linkis-ps-cs
deleted file mode 100644
index b5bba54..0000000
--- a/sbin/ext/linkis-ps-cs
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/bin/bash
-#
-# description: cs start cmd
-# @created: 01.16.2021
-#
-# Modified for Linkis 1.0.0
-
-# get log directory
-
-source $LINKIS_CONF_DIR/linkis-env.sh
-SERVER_SUFFIX="linkis-public-enhancements/linkis-ps-cs"
-## set log
-if [ "$LINKIS_LOG_DIR" = "" ]; then
- export LINKIS_LOG_DIR="$LINKIS_HOME/logs"
-fi
-export SERVER_LOG_PATH=$LINKIS_LOG_DIR/$SERVER_SUFFIX
-if [ ! -w "$SERVER_LOG_PATH" ] ; then
- mkdir -p "$SERVER_LOG_PATH"
-fi
-
-if test -z "$SERVER_HEAP_SIZE"
-then
- export SERVER_HEAP_SIZE="512M"
-fi
-
-if test -z "$SERVER_JAVA_OPTS"
-then
- export SERVER_JAVA_OPTS=" -Xmx$SERVER_HEAP_SIZE -XX:+UseG1GC -Xloggc:$SERVER_LOG_PATH/linkis-gc.log"
-fi
-
-
-export SERVER_CLASS=com.webank.wedatasphere.linkis.cs.server.LinkisCSApplication
-
-## conf dir
-export SERVER_CONF_PATH=$LINKIS_CONF_DIR:$LINKIS_CONF_DIR/$SERVER_SUFFIX
-
-## commons lib
-export LINKIS_COMMONS_LIB=$LINKIS_HOME/$LINKIS_PUBLIC_MODULE
-if [ ! -r "$LINKIS_COMMONS_LIB" ] ; then
- echo "linkis commons lib not exists $LINKIS_COMMONS_LIB"
- exit 1
-fi
-
-## server lib
-export SERVER_LIB=$LINKIS_HOME/lib/$SERVER_SUFFIX
-if [ ! -r "$SERVER_LIB" ] ; then
- echo "server lib not exists $SERVER_LIB"
- exit 1
-fi
-
-## set class path
-export SERVER_CLASS_PATH=$SERVER_CONF_PATH:$LINKIS_COMMONS_LIB/*:$SERVER_LIB/*
-
-## set spring args
-if [ "$CS_PORT" != "" ]; then
- SPRING_ARGS="--server.port=$CS_PORT"
-fi
-
-if [ "$EUREKA_URL" != "" ]; then
- SPRING_ARGS="$SPRING_ARGS --eureka.client.serviceUrl.defaultZone=$EUREKA_URL"
-fi
-
-nohup java $SERVER_JAVA_OPTS -cp $SERVER_CLASS_PATH $SERVER_CLASS $SPRING_ARGS 2>&1 > $SERVER_LOG_PATH/linkis.out &
-
-pid=$!
-sleep 2
-if [[ -z "${pid}" ]]; then
- echo "server $SERVER_NAME start failed!"
- exit 1
-else
- echo "server $SERVER_NAME start succeeded!"
- echo $pid > $SERVER_PID
-fi
diff --git a/sbin/ext/linkis-ps-datasource b/sbin/ext/linkis-ps-datasource
deleted file mode 100644
index f9b8fa6..0000000
--- a/sbin/ext/linkis-ps-datasource
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/bin/bash
-#
-# description: datasource start cmd
-# @created: 01.16.2021
-#
-# Modified for Linkis 1.0.0
-
-# get log directory
-
-source $LINKIS_CONF_DIR/linkis-env.sh
-SERVER_SUFFIX="linkis-public-enhancements/linkis-ps-datasource"
-## set log
-if [ "$LINKIS_LOG_DIR" = "" ]; then
- export LINKIS_LOG_DIR="$LINKIS_HOME/logs"
-fi
-export SERVER_LOG_PATH=$LINKIS_LOG_DIR/$SERVER_SUFFIX
-if [ ! -w "$SERVER_LOG_PATH" ] ; then
- mkdir -p "$SERVER_LOG_PATH"
-fi
-
-if test -z "$SERVER_HEAP_SIZE"
-then
- export SERVER_HEAP_SIZE="512M"
-fi
-
-if test -z "$SERVER_JAVA_OPTS"
-then
- export SERVER_JAVA_OPTS=" -Xmx$SERVER_HEAP_SIZE -XX:+UseG1GC -Xloggc:$SERVER_LOG_PATH/linkis-gc.log"
-fi
-
-
-export SERVER_CLASS=com.webank.wedatasphere.linkis.metadata.LinkisDataSourceApplication
-
-## conf dir
-export SERVER_CONF_PATH=$LINKIS_CONF_DIR:$LINKIS_CONF_DIR/$SERVER_SUFFIX
-
-## commons lib
-export LINKIS_COMMONS_LIB=$LINKIS_HOME/$LINKIS_PUBLIC_MODULE
-if [ ! -r "$LINKIS_COMMONS_LIB" ] ; then
- echo "linkis commons lib not exists $LINKIS_COMMONS_LIB"
- exit 1
-fi
-
-## server lib
-export SERVER_LIB=$LINKIS_HOME/lib/$SERVER_SUFFIX
-if [ ! -r "$SERVER_LIB" ] ; then
- echo "server lib not exists $SERVER_LIB"
- exit 1
-fi
-
-## set class path
-export SERVER_CLASS_PATH=$SERVER_CONF_PATH:$LINKIS_COMMONS_LIB/*:$SERVER_LIB/*
-
-## set spring args
-if [ "$DATASOURCE_PORT" != "" ]; then
- SPRING_ARGS="--server.port=$DATASOURCE_PORT"
-fi
-
-if [ "$EUREKA_URL" != "" ]; then
- SPRING_ARGS="$SPRING_ARGS --eureka.client.serviceUrl.defaultZone=$EUREKA_URL"
-fi
-
-nohup java $SERVER_JAVA_OPTS -cp $SERVER_CLASS_PATH $SERVER_CLASS $SPRING_ARGS 2>&1 > $SERVER_LOG_PATH/linkis.out &
-
-pid=$!
-sleep 2
-if [[ -z "${pid}" ]]; then
- echo "server $SERVER_NAME start failed!"
- exit 1
-else
- echo "server $SERVER_NAME start succeeded!"
- echo $pid > $SERVER_PID
-fi
diff --git a/sbin/ext/linkis-ps-publicservice b/sbin/ext/linkis-ps-publicservice
deleted file mode 100644
index d7757d3..0000000
--- a/sbin/ext/linkis-ps-publicservice
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/bin/bash
-#
-# description: publicservice start cmd
-# @created: 01.16.2021
-#
-# Modified for Linkis 1.0.0
-
-# get log directory
-
-source $LINKIS_CONF_DIR/linkis-env.sh
-SERVER_SUFFIX="linkis-public-enhancements/linkis-ps-publicservice"
-## set log
-if [ "$LINKIS_LOG_DIR" = "" ]; then
- export LINKIS_LOG_DIR="$LINKIS_HOME/logs"
-fi
-export SERVER_LOG_PATH=$LINKIS_LOG_DIR/$SERVER_SUFFIX
-if [ ! -w "$SERVER_LOG_PATH" ] ; then
- mkdir -p "$SERVER_LOG_PATH"
-fi
-
-if test -z "$SERVER_HEAP_SIZE"
-then
- export SERVER_HEAP_SIZE="512M"
-fi
-
-if test -z "$SERVER_JAVA_OPTS"
-then
- export SERVER_JAVA_OPTS=" -Xmx$SERVER_HEAP_SIZE -XX:+UseG1GC -Xloggc:$SERVER_LOG_PATH/linkis-gc.log"
-fi
-
-
-export SERVER_CLASS=com.webank.wedatasphere.linkis.DataWorkCloudApplication
-
-## conf dir
-export SERVER_CONF_PATH=$LINKIS_CONF_DIR:$LINKIS_CONF_DIR/$SERVER_SUFFIX
-
-## commons lib
-export LINKIS_COMMONS_LIB=$LINKIS_HOME/$LINKIS_PUBLIC_MODULE
-if [ ! -r "$LINKIS_COMMONS_LIB" ] ; then
- echo "linkis commons lib not exists $LINKIS_COMMONS_LIB"
- exit 1
-fi
-
-## server lib
-export SERVER_LIB=$LINKIS_HOME/lib/$SERVER_SUFFIX
-if [ ! -r "$SERVER_LIB" ] ; then
- echo "server lib not exists $SERVER_LIB"
- exit 1
-fi
-
-## set class path
-export SERVER_CLASS_PATH=$SERVER_CONF_PATH:$LINKIS_COMMONS_LIB/*:$SERVER_LIB/*
-
-## set spring args
-if [ "$PUBLICSERVICE_PORT" != "" ]; then
- SPRING_ARGS="--server.port=$PUBLICSERVICE_PORT"
-fi
-
-if [ "$EUREKA_URL" != "" ]; then
- SPRING_ARGS="$SPRING_ARGS --eureka.client.serviceUrl.defaultZone=$EUREKA_URL"
-fi
-
-nohup java $SERVER_JAVA_OPTS -cp $SERVER_CLASS_PATH $SERVER_CLASS $SPRING_ARGS 2>&1 > $SERVER_LOG_PATH/linkis.out &
-
-pid=$!
-sleep 2
-if [[ -z "${pid}" ]]; then
- echo "server $SERVER_NAME start failed!"
- exit 1
-else
- echo "server $SERVER_NAME start succeeded!"
- echo $pid > $SERVER_PID
-fi
diff --git a/sbin/linkis-daemon.sh b/sbin/linkis-daemon.sh
deleted file mode 100644
index e3a0d5f..0000000
--- a/sbin/linkis-daemon.sh
+++ /dev/null
@@ -1,145 +0,0 @@
-#!/bin/bash
-#
-# description: Starts and stops Server
-#
-# @name: linkis-demo
-# @created: 01.16.2021
-#
-# Modified for Linkis 1.0.0
-
-
-cd `dirname $0`
-cd ..
-INSTALL_HOME=`pwd`
-
-
-function print_usage(){
- echo "Usage: linkis-daemon [start | stop | restart | status] [serverName]"
- echo " serverName The service name of the operation"
- echo "Most commands print help when invoked w/o parameters."
-}
-
-if [ $# != 2 ]; then
- print_usage
- exit 2
-fi
-
-# set LINKIS_HOME
-if [ "$LINKIS_HOME" = "" ]; then
- export LINKIS_HOME=$INSTALL_HOME
-fi
-
-# set LINKIS_CONF_DIR
-if [ "$LINKIS_CONF_DIR" = "" ]; then
- export LINKIS_CONF_DIR=$LINKIS_HOME/conf
-fi
-
-#source $LINKIS_CONF_DIR/linkis-env.sh
-
-# get pid directory
-if [ "$LINKIS_PID_DIR" = "" ]; then
- export LINKIS_PID_DIR="$LINKIS_HOME/pid"
-fi
-if [ ! -w "$LINKIS_PID_DIR" ] ; then
- mkdir -p "$LINKIS_PID_DIR"
-fi
-
-function start()
-{
- echo "Start to check whether the $SERVER_NAME is running"
- if [[ -f "${SERVER_PID}" ]]; then
- pid=$(cat ${SERVER_PID})
- if kill -0 ${pid} >/dev/null 2>&1; then
- echo "$SERVER_NAME is already running."
- exit 1
- fi
- fi
- export SERVER_START_BIN=$LINKIS_HOME/sbin/ext/linkis-$SERVER_NAME
- if [[ ! -f "${SERVER_START_BIN}" ]]; then
- echo "The $SERVER_NAME is wrong or the corresponding startup script does not exist: "
- echo "$SERVER_START_BIN"
- exit 1
- else
- echo "Start to start server, startup script: $SERVER_START_BIN"
- sh $SERVER_START_BIN
- fi
-}
-
-function wait_for_server_to_die() {
- local pid
- local count
- pid=$1
- timeout=$2
- count=0
- timeoutTime=$(date "+%s")
- let "timeoutTime+=$timeout"
- currentTime=$(date "+%s")
- forceKill=1
-
- while [[ $currentTime -lt $timeoutTime ]]; do
- $(kill ${pid} > /dev/null 2> /dev/null)
- if kill -0 ${pid} > /dev/null 2>&1; then
- sleep 3
- else
- forceKill=0
- break
- fi
- currentTime=$(date "+%s")
- done
-
- if [[ forceKill -ne 0 ]]; then
- $(kill -9 ${pid} > /dev/null 2> /dev/null)
- fi
-}
-
-
-function stop()
-{
- if [[ ! -f "${SERVER_PID}" ]]; then
- echo "server $SERVER_NAME is not running"
- else
- pid=$(cat ${SERVER_PID})
- if [[ -z "${pid}" ]]; then
- echo "server $SERVER_NAME is not running"
- else
- wait_for_server_to_die $pid 40
- $(rm -f ${SERVER_PID})
- echo "server $SERVER_NAME is stopped."
- fi
- fi
-}
-
-function restart()
-{
- stop
- sleep 2
- start
-}
-
-status()
-{
- if [[ ! -f "${SERVER_PID}" ]]; then
- echo "server $SERVER_NAME is stopped"
- else
- pid=$(cat ${SERVER_PID})
- if [[ -z "${pid}" ]]; then
- echo "server $SERVER_NAME is not running"
- else
- echo "server $SERVER_NAME is running."
- fi
- fi
-
-}
-
-COMMAND=$1
-SERVER_NAME=$2
-export SERVER_PID=$LINKIS_PID_DIR/linkis_$SERVER_NAME.pid
-case $COMMAND in
- start|stop|restart|status)
- $COMMAND $SERVER_NAME
- ;;
- *)
- print_usage
- exit 2
- ;;
-esac
diff --git a/sbin/linkis-start-all.sh b/sbin/linkis-start-all.sh
deleted file mode 100644
index 5d600bf..0000000
--- a/sbin/linkis-start-all.sh
+++ /dev/null
@@ -1,158 +0,0 @@
-#!/bin/bash
-#
-# Copyright 2019 WeBank
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# description: Start all Server
-#
-# @name: start-all
-# @created: 01.16.2021
-#
-# Modified for Linkis 1.0.0
-#Actively load user env
-source /etc/profile
-source ~/.bash_profile
-
-cd `dirname $0`
-cd ..
-INSTALL_HOME=`pwd`
-
-# set LINKIS_HOME
-if [ "$LINKIS_HOME" = "" ]; then
- export LINKIS_HOME=$INSTALL_HOME
-fi
-
-# Start all linkis applications
-info="We will start all linkis applications, it will take some time, please wait"
-echo ${info}
-
-
-
-
-
-source ${LINKIS_HOME}/bin/common.sh
-
-local_host="`hostname --fqdn`"
-
-ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}')
-
-
-
-function startApp(){
-echo "<-------------------------------->"
-echo "Begin to start $SERVER_NAME"
-SERVER_START_CMD="sh $LINKIS_HOME/sbin/linkis-daemon.sh restart $SERVER_NAME"
-if test -z "$SERVER_IP"
-then
- SERVER_IP=$local_host
-fi
-
-isLocal $SERVER_IP
-flag=$?
-echo "Is local "$flag
-if [ $flag == "0" ];then
- eval $SERVER_START_CMD
-else
- ssh -p $SSH_PORT $SERVER_IP $SERVER_START_CMD
-fi
-isSuccess "End to start $SERVER_NAME"
-echo "<-------------------------------->"
-sleep 3
-}
-
-
-#eureka
-export SERVER_NAME="mg-eureka"
-SERVER_IP=$EUREKA_INSTALL_IP
-startApp
-
-
-#gateway
-SERVER_NAME="mg-gateway"
-SERVER_IP=$GATEWAY_INSTALL_IP
-startApp
-
-#publicservice
-SERVER_NAME="ps-publicservice"
-SERVER_IP=$PUBLICSERVICE_INSTALL_IP
-startApp
-
-
-#metadata
-SERVER_NAME="ps-datasource"
-SERVER_IP=$DATASOURCE_INSTALL_IP
-startApp
-
-#bml
-SERVER_NAME="ps-bml"
-SERVER_IP=$BML_INSTALL_IP
-startApp
-
-#cs-server
-SERVER_NAME="ps-cs"
-SERVER_IP=$CS_INSTALL_IP
-startApp
-
-
-#manager
-SERVER_NAME="cg-linkismanager"
-SERVER_IP=$MANAGER_INSTALL_IP
-startApp
-
-
-
-
-
-#entrnace
-SERVER_NAME="cg-entrance"
-SERVER_IP=$ENTRANCE_INSTALL_IP
-startApp
-
-#ecm
-SERVER_NAME="cg-engineconnmanager"
-SERVER_IP=$ENGINECONNMANAGER_INSTALL_IP
-startApp
-
-#ecp
-SERVER_NAME="cg-engineplugin"
-SERVER_IP=$ENGINECONN_PLUGIN_SERVER_INSTALL_IP
-startApp
-
-echo "start-all shell script executed completely"
-
-echo "Start to check all dss microservice"
-
-function checkServer(){
-echo "<-------------------------------->"
-echo "Begin to check $SERVER_NAME"
-if test -z "$SERVER_IP"
-then
- SERVER_IP=$local_host
-fi
-
-SERVER_BIN=${LINKIS_HOME}/$SERVER_NAME/bin
-
-if ! executeCMD $SERVER_IP "test -e $SERVER_BIN"; then
- echo "$SERVER_NAME is not installed,the checkServer steps will be skipped"
- return
-fi
-
-sh $workDir/bin/checkServices.sh $SERVER_NAME $SERVER_IP $SERVER_PORT
-isSuccess "start $SERVER_NAME "
-echo "<-------------------------------->"
-sleep 5
-}
-
-echo "Linkis started successfully"
diff --git a/sbin/linkis-stop-all.sh b/sbin/linkis-stop-all.sh
deleted file mode 100644
index 86ccb9f..0000000
--- a/sbin/linkis-stop-all.sh
+++ /dev/null
@@ -1,128 +0,0 @@
-#!/usr/bin/env bash
-#
-# Copyright 2019 WeBank
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# description: Stop all Server
-#
-# @name: stop-all
-# @created: 01.16.2021
-#
-# Modified for Linkis 1.0.0
-#Actively load user env
-source /etc/profile
-source ~/.bash_profile
-
-cd `dirname $0`
-cd ..
-INSTALL_HOME=`pwd`
-
-# set LINKIS_HOME
-if [ "$LINKIS_HOME" = "" ]; then
- export LINKIS_HOME=$INSTALL_HOME
-fi
-
-info="We will stop all linkis applications, it will take some time, please wait"
-echo ${info}
-
-
-
-
-source ${LINKIS_HOME}/bin/common.sh
-
-local_host="`hostname --fqdn`"
-
-ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}')
-
-
-function stopApp(){
-echo "<-------------------------------->"
-echo "Begin to stop $SERVER_NAME"
-SERVER_STOP_CMD="sh $LINKIS_HOME/sbin/linkis-daemon.sh stop $SERVER_NAME"
-if test -z "$SERVER_IP"
-then
- SERVER_IP=$local_host
-fi
-
-isLocal $SERVER_IP
-flag=$?
-echo "Is local "$flag
-if [ $flag == "0" ];then
- eval $SERVER_STOP_CMD
-else
- ssh -p $SSH_PORT $SERVER_IP $SERVER_STOP_CMD
-fi
-echo "<-------------------------------->"
-}
-
-
-
-#gateway
-SERVER_NAME="mg-gateway"
-SERVER_IP=$GATEWAY_INSTALL_IP
-stopApp
-
-#publicservice
-SERVER_NAME="ps-publicservice"
-SERVER_IP=$PUBLICSERVICE_INSTALL_IP
-stopApp
-
-
-#metadata
-SERVER_NAME="ps-datasource"
-SERVER_IP=$DATASOURCE_INSTALL_IP
-stopApp
-
-#bml
-SERVER_NAME="ps-bml"
-SERVER_IP=$BML_INSTALL_IP
-stopApp
-
-#cs-server
-SERVER_NAME="ps-cs"
-SERVER_IP=$CS_INSTALL_IP
-stopApp
-
-
-
-
-
-#ecm
-SERVER_NAME="cg-engineconnmanager"
-SERVER_IP=$ENGINECONNMANAGER_INSTALL_IP
-stopApp
-
-
-#entrnace
-SERVER_NAME="cg-entrance"
-SERVER_IP=$ENTRANCE_INSTALL_IP
-stopApp
-
-#ecp
-SERVER_NAME="cg-engineplugin"
-SERVER_IP=$ENGINECONN_PLUGIN_SERVER_INSTALL_IP
-stopApp
-
-#manager
-SERVER_NAME="cg-linkismanager"
-SERVER_IP=$MANAGER_INSTALL_IP
-stopApp
-
-#eureka
-export SERVER_NAME="mg-eureka"
-SERVER_IP=$EUREKA_INSTALL_IP
-stopApp
-
-echo "stop-all shell script executed completely"
diff --git a/web/config.sh b/web/config.sh
index 0ebe48d..4273ff7 100644
--- a/web/config.sh
+++ b/web/config.sh
@@ -1,8 +1,8 @@
#Configuring front-end ports
-dss_port="8088"
+linkis_port="8088"
#URL of the backend linkis gateway
linkis_url="http://localhost:20401"
-#dss ip address
-dss_ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}')
+#linkis ip address
+linkis_ipaddr=$(ip addr | awk '/^[0-9]+: / {}; /inet.*global/ {print gensub(/(.*)\/(.*)/, "\\1", "g", $2)}')
diff --git a/web/install.sh b/web/install.sh
index 01a7b33..4ce6639 100644
--- a/web/install.sh
+++ b/web/install.sh
@@ -4,30 +4,30 @@
workDir=$(cd `dirname $0`; pwd)
-echo "dss front-end deployment script"
+echo "linkis front-end deployment script"
source $workDir/config.sh
# 前端放置目录,默认为解压目录
-dss_basepath=$workDir
+linkis_basepath=$workDir
#To be compatible with MacOS and Linux
if [[ "$OSTYPE" == "darwin"* ]]; then
# Mac OSX
- echo "dss install not support Mac OSX operating system"
+ echo "linkis install not support Mac OSX operating system"
exit 1
elif [[ "$OSTYPE" == "linux-gnu" ]]; then
# linux
echo "linux"
elif [[ "$OSTYPE" == "cygwin" ]]; then
# POSIX compatibility layer and Linux environment emulation for Windows
- echo "dss not support Windows operating system"
+ echo "linkis not support Windows operating system"
exit 1
elif [[ "$OSTYPE" == "msys" ]]; then
# Lightweight shell and GNU utilities compiled for Windows (part of MinGW)
- echo "dss not support Windows operating system"
+ echo "linkis not support Windows operating system"
exit 1
elif [[ "$OSTYPE" == "win32" ]]; then
- echo "dss not support Windows operating system"
+ echo "linkis not support Windows operating system"
exit 1
elif [[ "$OSTYPE" == "freebsd"* ]]; then
# ...
@@ -44,18 +44,18 @@
echo "========================================================================配置信息======================================================================="
-echo "前端访问端口:${dss_port}"
+echo "前端访问端口:${linkis_port}"
echo "后端Linkis的地址:${linkis_url}"
-echo "静态文件地址:${dss_basepath}/dist"
+echo "静态文件地址:${linkis_basepath}/dist"
echo "当前路径:${workDir}"
-echo "本机ip:${dss_ipaddr}"
+echo "本机ip:${linkis_ipaddr}"
echo "========================================================================配置信息======================================================================="
echo ""
# 创建文件并配置nginx
-dssConf(){
+linkisConf(){
s_host='$host'
s_remote_addr='$remote_addr'
@@ -63,16 +63,16 @@
s_http_upgrade='$http_upgrade'
echo "
server {
- listen $dss_port;# 访问端口
+ listen $linkis_port;# 访问端口
server_name localhost;
#charset koi8-r;
#access_log /var/log/nginx/host.access.log main;
- location /dss/visualis {
- root ${dss_basepath}/dss/visualis; # 静态文件目录
+ location /linkis/visualis {
+ root ${linkis_basepath}/linkis/visualis; # 静态文件目录
autoindex on;
}
location / {
- root ${dss_basepath}/dist; # 静态文件目录
+ root ${linkis_basepath}/dist; # 静态文件目录
index index.html index.html;
}
location /ws {
@@ -105,7 +105,7 @@
root /usr/share/nginx/html;
}
}
- " > /etc/nginx/conf.d/dss.conf
+ " > /etc/nginx/conf.d/linkis.conf
}
@@ -117,14 +117,14 @@
echo "nginx 安装成功"
# 配置nginx
- dssConf
+ linkisConf
# 解决 0.0.0.0:8888 问题
yum -y install policycoreutils-python
- semanage port -a -t http_port_t -p tcp $dss_port
+ semanage port -a -t http_port_t -p tcp $linkis_port
# 开放前端访问端口
- firewall-cmd --zone=public --add-port=$dss_port/tcp --permanent
+ firewall-cmd --zone=public --add-port=$linkis_port/tcp --permanent
# 重启防火墙
firewall-cmd --reload
@@ -156,16 +156,16 @@
yum install nginx -y
# 配置nginx
- dssConf
+ linkisConf
# 防火墙
- S_iptables=`lsof -i:$dss_port | wc -l`
+ S_iptables=`lsof -i:$linkis_port | wc -l`
if [ "$S_iptables" -gt "0" ];then
# 已开启端口防火墙重启
service iptables restart
else
# 未开启防火墙添加端口再重启
- iptables -I INPUT 5 -i eth0 -p tcp --dport $dss_port -m state --state NEW,ESTABLISHED -j ACCEPT
+ iptables -I INPUT 5 -i eth0 -p tcp --dport $linkis_port -m state --state NEW,ESTABLISHED -j ACCEPT
service iptables save
service iptables restart
fi
@@ -190,6 +190,6 @@
if [[ $version -eq 7 ]]; then
centos7
fi
-echo '安装visualis前端,用户自行编译DSS前端安装包,则安装时需要把visualis的前端安装包放置于此'$dss_basepath/dss/visualis',用于自动化安装:'
-cd $dss_basepath/dss/visualis;unzip -o build.zip > /dev/null
-echo "请浏览器访问:http://${dss_ipaddr}:${dss_port}"
+echo '安装visualis前端,用户自行编译linkis前端安装包,则安装时需要把visualis的前端安装包放置于此'$linkis_basepath/linkis/visualis',用于自动化安装:'
+cd $linkis_basepath/linkis/visualis;unzip -o build.zip > /dev/null
+echo "请浏览器访问:http://${linkis_ipaddr}:${linkis_port}"
diff --git a/web/package.json b/web/package.json
index 27c2efe..a256a45 100644
--- a/web/package.json
+++ b/web/package.json
@@ -1,6 +1,6 @@
{
"name": "linkis",
- "version": "1.0.0",
+ "version": "1.0.1",
"private": true,
"scripts": {
"serve": "vue-cli-service serve",
diff --git a/web/vue.config.js b/web/vue.config.js
index a3484af..d8737a0 100644
--- a/web/vue.config.js
+++ b/web/vue.config.js
@@ -161,10 +161,10 @@
{ source: './install.sh', destination: `./dist` }
],
// 先删除根目录下的zip包
- delete: [`./wedatasphere-DataSphereStudio-${getVersion()}-dist.zip`],
+ delete: [`./wedatasphere-linkis-${getVersion()}-dist.zip`],
// 将dist文件夹下的文件进行打包
archive: [
- { source: './dist', destination: `./wedatasphere-DataSphereStudio-${getVersion()}-dist.zip` },
+ { source: './dist', destination: `./wedatasphere-linkis-${getVersion()}-dist.zip` },
]
},
}])