diff --git a/.gitignore b/.gitignore
index d8b0f432b..0d196cecf 100644
--- a/.gitignore
+++ b/.gitignore
@@ -20,4 +20,11 @@ streamis-appconn/target
/logs/streamis-server.log
/logs/linkis.log
/test/target
-/test/src
\ No newline at end of file
+/test/src
+/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/target/
+/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/target/
+/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/target/
+/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/target/
+/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/target/
+/streamis-jobmanager/streamis-job-log/streamis-job-log-common/target/
+/streamis-jobmanager/streamis-job-log/streamis-job-log-server/target/
diff --git a/README-ZH.md b/README-ZH.md
index 9a0c75669..192e35aa1 100644
--- a/README-ZH.md
+++ b/README-ZH.md
@@ -46,16 +46,16 @@ Dimension节点、Transform节点、Sink节点 和 [Visualis](https://github.com
![Streaming job configuration](docs/images/stream_job_config_1.png)
![Streaming job configuration](docs/images/stream_job_config_2.png)
- For more features, please refer to the [Streamis User Manual](docs/zh_CN/0.2.0/使用文档/Streamis用户手册.md).
+ For more features, please refer to the [Streamis User Manual](docs/zh_CN/使用文档/Streamis用户手册.md).
----
## Depended ecosystem components
-| Depended application tool | Description | Compatible Streamis version |
-|--------------|---------------------------------------------------------------|--------------|
-| [DataSphereStudio](https://github.com/WeBankFinTech/DataSphereStudio) | A data application development and management framework. With a workflow-style graphical drag-and-drop development experience, it covers the full data application development lifecycle, from data exchange, desensitization and cleansing, analysis and mining, quality checking, visualization and scheduling to data output applications. | >= DSS1.1.0 (released) |
-| [Linkis](https://github.com/apache/incubator-linkis) | The computation middleware Apache Linkis. By providing standard interfaces such as REST/WebSocket/JDBC/SDK, it lets upper-layer applications easily connect to and access underlying engines such as MySQL/Spark/Hive/Presto/Flink. | >= Linkis1.1.1 (released); some functions require Linkis 1.1.2 |
+| Depended application tool | Description | Compatible Streamis version |
+|--------------|---------------------------------------------------------------|---------------------------------------------|
+| [DataSphereStudio](https://github.com/WeBankFinTech/DataSphereStudio) | A data application development and management framework. With a workflow-style graphical drag-and-drop development experience, it covers the full data application development lifecycle, from data exchange, desensitization and cleansing, analysis and mining, quality checking, visualization and scheduling to data output applications. | >= DSS1.1.2 (released) |
+| [Linkis](https://github.com/apache/incubator-linkis) | The computation middleware Apache Linkis. By providing standard interfaces such as REST/WebSocket/JDBC/SDK, it lets upper-layer applications easily connect to and access underlying engines such as MySQL/Spark/Hive/Presto/Flink. | >= Linkis1.4.0 (released) |
----
@@ -73,14 +73,14 @@ Dimension节点、Transform节点、Sink节点 和 [Visualis](https://github.com
## Compilation, installation and deployment
- Please refer to the [Streamis installation and deployment document](docs/zh_CN/0.2.0/Streamis安装文档.md) to install and deploy Streamis and its dependent environments.
+ Please refer to the [Streamis installation and deployment document](docs/zh_CN/安装文档/Streamis安装文档.md) to install and deploy Streamis and its dependent environments.
----
## Examples and usage guide
- Please see the [user documentation](docs/zh_CN/0.2.0/使用文档/Streamis用户手册.md) to learn how to quickly use Streamis.
+ Please see the [user documentation](docs/zh_CN/使用文档/Streamis用户手册.md) to learn how to quickly use Streamis.
----
diff --git a/README.md b/README.md
index a174ee03d..505241267 100644
--- a/README.md
+++ b/README.md
@@ -52,10 +52,10 @@ The Dimension node, Transform node, Sink node and [Visualis](https://github.com/
## Depended ecosystems
-| Depended Component | Description | Streamis compatibility |
-| -------------- | -------------------------------------------------------------- | --------------|
-| [DataSphereStudio](https://github.com/WeBankFinTech/DataSphereStudio) | Data application development management framework. With a unified UI, the workflow-like graphical drag-and-drop development experience meets the entire lifecycle of data application development from data import, desensitization cleaning, data analysis, data mining, quality inspection, visualization, scheduling to data output applications, etc. | >= DSS1.1.0 (Released) |
-| [Linkis](https://github.com/apache/incubator-linkis) | Apache Linkis, builds a layer of computation middleware, by using standard interfaces such as REST/WS/JDBC provided by Linkis, the upper applications can easily access the underlying engines such as MySQL/Spark/Hive/Presto/Flink, etc. | >= Linkis1.1.1 (Released),some functions need to be supported by linkis 1.1.2 |
+| Depended Component | Description | Streamis compatibility |
+| -------------- | -------------------------------------------------------------- |------------------------------|
+| [DataSphereStudio](https://github.com/WeBankFinTech/DataSphereStudio) | Data application development management framework. With a unified UI, the workflow-like graphical drag-and-drop development experience meets the entire lifecycle of data application development from data import, desensitization cleaning, data analysis, data mining, quality inspection, visualization, scheduling to data output applications, etc. | >= DSS1.1.2 (Released) |
+| [Linkis](https://github.com/apache/linkis) | Apache Linkis, builds a layer of computation middleware, by using standard interfaces such as REST/WS/JDBC provided by Linkis, the upper applications can easily access the underlying engines such as MySQL/Spark/Hive/Presto/Flink, etc. | >= Linkis1.4.0 (Released) |
## Demo Trial environment
@@ -71,7 +71,7 @@ The Dimension node, Transform node, Sink node and [Visualis](https://github.com/
## Compile and install deployment
-please refer to [Streamis Installation and Deployment Document](docs/en_US/0.2.0/StreamisDeployment.md) for installing and deploying Streamis.
+Please refer to the [Streamis Installation and Deployment Document](docs/en_US/0.3.0/StreamisDeployment.md) for installing and deploying Streamis.
----
diff --git a/assembly/pom.xml b/assembly/pom.xml
index 8f8038bbc..002ccdadc 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -20,7 +20,7 @@
streamis
com.webank.wedatasphere.streamis
- 0.2.0
+ 0.3.0
4.0.0
diff --git a/bin/upgrade.sh b/bin/upgrade.sh
deleted file mode 100644
index 3a71a8eed..000000000
--- a/bin/upgrade.sh
+++ /dev/null
@@ -1,205 +0,0 @@
-#!/usr/bin/env bash
-#
-# Copyright 2022 WeBank
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Use to upgrade from 0.1.0 to 0.2.0
-
-if [ -f "~/.bashrc" ];then
- echo "Warning! user bashrc file does not exist."
-else
- source ~/.bashrc
-fi
-
-shellDir=`dirname $0`
-workDir=`cd ${shellDir}/..;pwd`
-
-interact_echo(){
- while [ 1 ]; do
- read -p "$1 (Y/N)" yn
- if [[ "${yn}x" == "Yx" ]] || [[ "${yn}x" == "yx" ]]; then
- return 0
- elif [[ "${yn}x" == "Nx" ]] || [[ "${yn}x" == "nx" ]]; then
- return 1
- else
- echo "Unknown choose: [$yn], please choose again."
- fi
- done
-}
-
-interact_echo "Are you sure the current version of Streamis is 0.1.0 and need to upgrade to 0.2.0 ?"
-if [[ $? == 0 ]]; then
- source ${workDir}/conf/db.sh
- echo "<------ Will connect to [${MYSQL_HOST}:${MYSQL_PORT}] to upgrade the tables in database... ------>"
- mysql -h$MYSQL_HOST -P$MYSQL_PORT -u$MYSQL_USER -p$MYSQL_PASSWORD -D$MYSQL_DB --default-character-set=utf8 << EOF 1>/dev/null
- /*Modify the table column*/
- ALTER TABLE \`linkis_stream_job\` MODIFY COLUMN \`project_name\` varchar(100) DEFAULT NULL;
- ALTER TABLE \`linkis_stream_job\` MODIFY COLUMN \`name\` varchar(200) DEFAULT NULL;
- ALTER TABLE \`linkis_stream_project\` MODIFY COLUMN \`name\` varchar(100) DEFAULT NULL;
- ALTER TABLE \`linkis_stream_task\` MODIFY COLUMN \`job_id\` varchar(200) DEFAULT NULL;
- ALTER TABLE \`linkis_stream_task\` MODIFY COLUMN \`linkis_job_id\` varchar(200) DEFAULT NULL;
-
- ALTER TABLE \`linkis_stream_project\` ADD create_time datetime DEFAULT NULL;
- ALTER TABLE \`linkis_stream_project\` ADD last_update_by varchar(50) DEFAULT NULL;
- ALTER TABLE \`linkis_stream_project\` ADD last_update_time datetime DEFAULT NULL;
- ALTER TABLE \`linkis_stream_project\` ADD is_deleted tinyint unsigned DEFAULT 0;
-
- /*Add indexes into the tables*/
- ALTER TABLE \`linkis_stream_job\` ADD UNIQUE KEY(\`project_name\`, \`name\`);
- ALTER TABLE \`linkis_stream_job_version\` ADD UNIQUE KEY(\`job_id\`, \`version\`);
-
- /*Add new tables*/
- DROP TABLE IF EXISTS \`linkis_stream_project_privilege\`;
- CREATE TABLE \`linkis_stream_project_privilege\` (
- \`id\` bigint(20) NOT NULL AUTO_INCREMENT,
- \`project_id\` bigint(20) NOT NULL,
- \`user_name\` varchar(100) NOT NULL,
- \`privilege\` tinyint(1) DEFAULT '0' NOT NULL COMMENT '1:发布权限 ,2:编辑权限 ,3:查看权限',
- PRIMARY KEY (\`id\`) USING BTREE
- ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 COMMENT='项目权限表';
-
- DROP TABLE IF EXISTS \`linkis_stream_job_config_def\`;
- CREATE TABLE \`linkis_stream_job_config_def\` (
- \`id\` bigint(20) NOT NULL AUTO_INCREMENT,
- \`key\` varchar(100) COLLATE utf8_bin NOT NULL,
- \`name\` varchar(100) COLLATE utf8_bin DEFAULT NULL COMMENT 'Equals option',
- \`type\` varchar(50) COLLATE utf8_bin NOT NULL DEFAULT 'NONE' COMMENT 'def type, NONE: 0, INPUT: 1, SELECT: 2',
- \`sort\` int(10) DEFAULT '0' COMMENT 'In order to sort the configurations that have the same level',
- \`description\` varchar(200) COLLATE utf8_bin DEFAULT NULL COMMENT 'Description of configuration',
- \`validate_type\` varchar(50) COLLATE utf8_bin DEFAULT NULL COMMENT 'Method the validate the configuration',
- \`validate_rule\` varchar(100) COLLATE utf8_bin DEFAULT NULL COMMENT 'Value of validation rule',
- \`style\` varchar(200) COLLATE utf8_bin DEFAULT '' COMMENT 'Display style',
- \`visiable\` tinyint(1) NOT NULL DEFAULT '1' COMMENT '0: hidden, 1: display',
- \`level\` tinyint(1) NOT NULL DEFAULT '1' COMMENT '0: root, 1: leaf',
- \`unit\` varchar(25) COLLATE utf8_bin DEFAULT NULL COMMENT 'Unit symbol',
- \`default_value\` varchar(200) COLLATE utf8_bin DEFAULT NULL COMMENT 'Default value',
- \`ref_values\` varchar(200) COLLATE utf8_bin DEFAULT '',
- \`parent_ref\` bigint(20) DEFAULT NULL COMMENT 'Parent key of configuration def',
- \`required\` tinyint(1) NOT NULL DEFAULT '0' COMMENT 'If the value of configuration is necessary',
- \`is_temp\` tinyint(1) DEFAULT '0' COMMENT 'Temp configuration',
- PRIMARY KEY (\`id\`),
- UNIQUE KEY \`config_def_key\` (\`key\`)
- ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
-
- DROP TABLE IF EXISTS \`linkis_stream_job_config\`;
- CREATE TABLE \`linkis_stream_job_config\` (
- \`job_id\` bigint(20) NOT NULL,
- \`job_name\` varchar(200) COLLATE utf8_bin NOT NULL COMMENT 'Just store the job name',
- \`key\` varchar(100) COLLATE utf8_bin NOT NULL,
- \`value\` varchar(500) COLLATE utf8_bin NOT NULL,
- \`ref_def_id\` bigint(20) DEFAULT NULL COMMENT 'Refer to id in config_def table',
- PRIMARY KEY (\`job_id\`,\`key\`),
- KEY \`config_def_id\` (\`ref_def_id\`)
- ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_bin;
-
- /*Execute dml*/
- source ${workDir}/db/streamis_dml.sql
-
- /*Data migration*/
- INSERT INTO \`linkis_stream_job_config\`(\`key\`, \`value\`, \`job_id\`, \`job_name\`, \`ref_def_id\`) SELECT ov.config_key, ov.config_value, ov.job_id, ov.job_name, d.id as refer_id from linkis_stream_configuration_config_value ov left join linkis_stream_job_config_def d on ov.config_key = d.key WHERE ov.config_value IS NOT NULL AND ov.job_name IS NOT NULL GROUP BY ov.job_id,ov.config_key;
- UPDATE linkis_stream_job_config SET \`key\` = "wds.linkis.flink.taskmanager.memory" WHERE \`key\` = "flink.taskmanager.memory";
- UPDATE linkis_stream_job_config SET \`key\` = "wds.linkis.flink.taskmanager.cpus" WHERE \`key\` = "flink.taskmanager.cpu.cores";
- UPDATE linkis_stream_job_config SET \`key\` = "wds.linkis.flink.taskmanager.cpus" WHERE \`key\` = "wds.linkis.flink.taskManager.cpus";
- UPDATE linkis_stream_job_config SET \`key\` = "wds.linkis.flink.taskmanager.numberOfTaskSlots" WHERE \`key\` = "flink.taskmanager.numberOfTaskSlots";
- UPDATE linkis_stream_job_config SET \`key\` = "wds.linkis.flink.app.parallelism" WHERE \`key\` = "wds.linkis.engineconn.flink.app.parallelism";
- UPDATE linkis_stream_job_config SET \`key\` = "wds.linkis.flink.jobmanager.memory" WHERE \`key\` = "flink.jobmanager.memory";
- UPDATE linkis_stream_job_config c SET \`ref_def_id\` = (SELECT d.id FROM linkis_stream_job_config_def d WHERE d.\`key\` = c.\`key\`) WHERE c.ref_def_id IS NULL;
- SELECT @flink_extra_param_id:=id FROM linkis_stream_job_config_def WHERE \`key\` = "wds.linkis.flink.custom";
- UPDATE linkis_stream_job_config SET ref_def_id = @flink_extra_param_id WHERE ref_def_id IS NULL;
-
- /*Drop tables*/
- /*DROP TABLE \`linkis_stream_configuration_config_key\`*/
- /*DROP TABLE \`linkis_stream_configuration_config_value\`*/
-
- /*update tables data*/
- delimiter %%
-
- create procedure update_project()
- BEGIN
- -- Declare variables
- DECLARE projectname varchar(50);
- DECLARE done INT default 0;
-
- -- Create the cursor and define the rows it iterates over
- DECLARE cur CURSOR for
- SELECT distinct j.project_name from linkis_stream_job j;
- -- When the cursor is exhausted, the traversal is finished; set done to 1
- DECLARE CONTINUE HANDLER for not FOUND set done = 1;
- -- Open the cursor
- open cur;
- -- Run the loop
- posLoop:
- LOOP
- -- Fetch projectname from the cursor
- FETCH cur INTO projectname ;
- -- If done is 1, the traversal is finished; leave the loop
- IF done = 1 THEN
- LEAVE posLoop;
- -- Note: an IF statement must be closed with END IF
- END IF;
- insert into linkis_stream_project(\`name\`,\`create_by\`,\`create_time\`) values (projectname,\'system\',now());
- -- End the loop
- END LOOP posLoop;
- -- Close the cursor
- CLOSE cur;
- -- End of the procedure; the delimiter is reset below
- END %%
-
- create procedure update_project_privilege()
- BEGIN
- -- Declare variables
- DECLARE projectid bigint(20);
- DECLARE create_by varchar(50);
- DECLARE done INT default 0;
-
- -- Create the cursor and define the rows it iterates over
- DECLARE cur CURSOR for
- SELECT distinct p.id,j.create_by from linkis_stream_project p,linkis_stream_job j where p.name =j.project_name ;
- -- When the cursor is exhausted, the traversal is finished; set done to 1
- DECLARE CONTINUE HANDLER for not FOUND set done = 1;
- -- Open the cursor
- open cur;
- -- Run the loop
- posLoop:
- LOOP
- -- Fetch id and create_by from the cursor
- FETCH cur INTO projectid ,create_by;
- -- If done is 1, the traversal is finished; leave the loop
- IF done = 1 THEN
- LEAVE posLoop;
- -- Note: an IF statement must be closed with END IF
- END IF;
-
- insert into linkis_stream_project_privilege (project_id ,user_name ,privilege) values (projectid,create_by,2);
- -- End the loop
- END LOOP posLoop;
- -- Close the cursor
- CLOSE cur;
- -- End of the procedure; the delimiter is reset below
- END %%
- delimiter ;
-
- call update_project;
- call update_project_privilege;
-
- drop PROCEDURE update_project;
- drop PROCEDURE update_project_privilege;
-
-EOF
- echo "<------ End to upgrade ------>"
-fi
-
-
-
diff --git a/conf/config.sh b/conf/config.sh
index 094a2a1c7..c0c6536e4 100644
--- a/conf/config.sh
+++ b/conf/config.sh
@@ -40,6 +40,6 @@ GATEWAY_PORT=9001
STREAMIS_SERVER_INSTALL_IP=127.0.0.1
STREAMIS_SERVER_INSTALL_PORT=9400
-STREAMIS_VERSION=0.2.0
+STREAMIS_VERSION=0.3.0
STREAMIS_FILE_NAME="STREAMIS-$STREAMIS_VERSION"
\ No newline at end of file
diff --git a/conf/db.sh b/conf/db.sh
index 176f14419..66b04b788 100644
--- a/conf/db.sh
+++ b/conf/db.sh
@@ -1,8 +1,6 @@
-### for DSS-Server and Eventchecker APPCONN
+### for Streamis-Server
MYSQL_HOST=
MYSQL_PORT=
MYSQL_DB=
MYSQL_USER=
-MYSQL_PASSWORD=
-
-
+MYSQL_PASSWORD=
\ No newline at end of file
diff --git a/db/streamis_ddl.sql b/db/streamis_ddl.sql
index c090acb92..0ca5a47cd 100644
--- a/db/streamis_ddl.sql
+++ b/db/streamis_ddl.sql
@@ -132,9 +132,10 @@ CREATE TABLE `linkis_stream_job` (
`create_time` datetime DEFAULT NULL,
`label` varchar(200) DEFAULT NULL,
`description` varchar(200) DEFAULT NULL,
- `job_type` varchar(30) DEFAULT NULL COMMENT '目前只支持flink.sql、flink.jar',
+ `job_type` varchar(30) DEFAULT NULL COMMENT '目前只支持flink.sql、flink.jar、spark.jar',
`submit_user` varchar(100) DEFAULT NULL,
`workspace_name` varchar(50) DEFAULT NULL,
+ `current_version` varchar(50) DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE,
UNIQUE KEY(`project_name`, `name`)
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='作业表';
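The new current_version column mirrors the newest row in linkis_stream_job_version (the 0.3.0 upgrade DML later in this diff backfills it that way). An illustrative consistency check, not part of the shipped scripts:

```sql
-- Jobs whose current_version does not match any of their version rows
SELECT j.id, j.name, j.current_version
FROM linkis_stream_job j
LEFT JOIN linkis_stream_job_version v
  ON v.job_id = j.id AND v.version = j.current_version
WHERE j.current_version IS NOT NULL AND v.id IS NULL;
```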
@@ -149,6 +150,7 @@ CREATE TABLE `linkis_stream_job_version` (
`version` varchar(20) DEFAULT NULL,
`source` varchar(255) DEFAULT NULL COMMENT '这个版本的来源,比如:用户上传,由某个历史版本回退回来的',
`job_content` text COMMENT '内容为meta.json',
+ `manage_mode` varchar(30) DEFAULT 'EngineConn' COMMENT 'Manage mode',
`comment` varchar(255) DEFAULT NULL,
`create_time` datetime DEFAULT NULL,
`create_by` varchar(32) DEFAULT NULL,
@@ -199,6 +201,8 @@ CREATE TABLE `linkis_stream_project_files` (
`create_time` datetime DEFAULT CURRENT_TIMESTAMP,
`create_by` varchar(32) DEFAULT NULL,
`comment` varchar(255) DEFAULT NULL COMMENT '说明',
+ `update_time` datetime DEFAULT NULL,
+ `md5` varchar(100) DEFAULT NULL COMMENT '文件md5',
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='项目表';
diff --git a/db/streamis_dml.sql b/db/streamis_dml.sql
index 85bd4bc96..81561d752 100644
--- a/db/streamis_dml.sql
+++ b/db/streamis_dml.sql
@@ -17,9 +17,10 @@ INSERT INTO `linkis_stream_job_config_def` VALUES (12,'wds.linkis.flink.alert.ru
INSERT INTO `linkis_stream_job_config_def` VALUES (13,'wds.linkis.flink.alert.user','告警用户','NONE',0,'告警用户',NULL,NULL,'',1,1,NULL,NULL,'',NULL,0,0);
INSERT INTO `linkis_stream_job_config_def` VALUES (14,'wds.linkis.flink.alert.level','告警级别','NONE',0,'告警级别','None',NULL,'',1,1,NULL,NULL,'',NULL,0,0);
INSERT INTO `linkis_stream_job_config_def` VALUES (15,'wds.linkis.flink.alert.failure.level','失败时告警级别','NONE',0,'失败时告警级别','None',NULL,'',1,1,NULL,NULL,'',NULL,0,0);
-INSERT INTO `linkis_stream_job_config_def` VALUES (16,'wds.linkis.flink.alert.failure.user','失败时告警用户','NONE',0,'失败时告警用户','None',NULL,'',1,1,NULL,NULL,'',NULL,0,0);
+INSERT INTO `linkis_stream_job_config_def` VALUES (16,'wds.linkis.flink.alert.failure.user','失败时告警用户','INPUT',0,'失败时告警用户','None',NULL,'',1,1,NULL,NULL,'',NULL,0,0);
INSERT INTO `linkis_stream_job_config_def` VALUES (32,'wds.linkis.flink.authority','权限设置','NONE',0,'权限设置','None',NULL,'',1,0,NULL,NULL,'',NULL,0,0);
INSERT INTO `linkis_stream_job_config_def` VALUES (33,'wds.linkis.flink.authority.visible','可见人员','INPUT',0,'可见人员','None',NULL,'',1,1,NULL,NULL,'',32,0,0);
INSERT INTO `linkis_stream_job_config_def` VALUES (34,'wds.linkis.rm.yarnqueue','使用Yarn队列','INPUT',0,'使用Yarn队列','None',NULL,'',1,1,NULL,NULL,'',1,0,0);
INSERT INTO `linkis_stream_job_config_def` VALUES (35,'wds.linkis.flink.app.fail-restart.switch','作业失败自动拉起开关','SELECT',1,'作业失败自动拉起开关','None',NULL,'',1,1,NULL,'OFF','ON,OFF',8,0,0);
-INSERT INTO `linkis_stream_job_config_def` VALUES (36,'wds.linkis.flink.app.start-auto-restore.switch','作业启动状态自恢复','SELECT',2,'作业启动状态自恢复','None',NULL,'',1,1,NULL,'ON','ON,OFF',8,0,0);
\ No newline at end of file
+INSERT INTO `linkis_stream_job_config_def` VALUES (36,'wds.linkis.flink.app.start-auto-restore.switch','作业启动状态自恢复','SELECT',2,'作业启动状态自恢复','None',NULL,'',1,1,NULL,'ON','ON,OFF',8,0,0);
+INSERT INTO `linkis_stream_job_config_def` VALUES (38, 'linkis.ec.app.manage.mode', '管理模式', 'SELECT', 3, 'EngineConn管理模式', 'None', NULL, '', 1, 1, NULL, 'attach', 'detach,attach', 8, 0, 0);
diff --git a/db/upgrade/0.3.0/upgrade_ddl.sql b/db/upgrade/0.3.0/upgrade_ddl.sql
new file mode 100644
index 000000000..8793d11dc
--- /dev/null
+++ b/db/upgrade/0.3.0/upgrade_ddl.sql
@@ -0,0 +1,5 @@
+ALTER TABLE linkis_stream_job ADD COLUMN `current_version` varchar(50);
+ALTER TABLE linkis_stream_job_version ADD COLUMN IF NOT EXISTS `manage_mode` varchar(30) NOT NULL DEFAULT 'EngineConn';
+
+ALTER TABLE linkis_stream_project_files ADD update_time datetime NULL;
+ALTER TABLE linkis_stream_project_files ADD md5 varchar(100) NULL;
\ No newline at end of file
diff --git a/db/upgrade/0.3.0/upgrade_dml.sql b/db/upgrade/0.3.0/upgrade_dml.sql
new file mode 100644
index 000000000..ebe8ec5cc
--- /dev/null
+++ b/db/upgrade/0.3.0/upgrade_dml.sql
@@ -0,0 +1,7 @@
+UPDATE linkis_stream_job j SET current_version = (SELECT version FROM linkis_stream_job_version v WHERE v.job_id = j.id ORDER BY id DESC limit 1);
+
+UPDATE linkis_stream_job_config_def
+SET `key`='wds.linkis.flink.alert.failure.user', name='失败时告警用户', `type`='INPUT', sort=0, description='失败时告警用户', validate_type='None', validate_rule=NULL, `style`='', visiable=1, `level`=1, unit=NULL, default_value='', ref_values='', parent_ref=8, required=0, is_temp=0
+WHERE id=16;
+
+INSERT INTO `linkis_stream_job_config_def` VALUES (38, 'linkis.ec.app.manage.mode', '管理模式', 'SELECT', 3, 'EngineConn管理模式', 'None', NULL, '', 1, 1, NULL, 'attach', 'detach,attach', 8, 0, 0);
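Since this release removes bin/upgrade.sh (deleted above), the 0.3.0 scripts are presumably applied by hand. A minimal sketch, reusing the connection variables from conf/db.sh the same way the deleted script did (not a shipped tool):

```bash
# Apply the 0.3.0 schema change first, then the data change.
source conf/db.sh
mysql -h"$MYSQL_HOST" -P"$MYSQL_PORT" -u"$MYSQL_USER" -p"$MYSQL_PASSWORD" \
      -D"$MYSQL_DB" --default-character-set=utf8 < db/upgrade/0.3.0/upgrade_ddl.sql
mysql -h"$MYSQL_HOST" -P"$MYSQL_PORT" -u"$MYSQL_USER" -p"$MYSQL_PASSWORD" \
      -D"$MYSQL_DB" --default-character-set=utf8 < db/upgrade/0.3.0/upgrade_dml.sql
```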
diff --git a/docs/en_US/0.2.0/StreamisDeployment.md b/docs/en_US/0.3.0/StreamisDeployment.md
similarity index 77%
rename from docs/en_US/0.2.0/StreamisDeployment.md
rename to docs/en_US/0.3.0/StreamisDeployment.md
index af76c7223..df914ba62 100644
--- a/docs/en_US/0.2.0/StreamisDeployment.md
+++ b/docs/en_US/0.3.0/StreamisDeployment.md
@@ -1,7 +1,7 @@
# Streamis installation and deployment documentation
## 1. Component introduction
-Streamis0.2.0 provides the Streamis-JobManager component, the role of the component is
+Streamis 0.3.0 provides the Streamis-JobManager component, whose role is to:
1. Publish streaming applications
2. Set streaming application parameters, such as the number of Flink slots, checkpoint related parameters, etc.
3. Manage streaming applications (e.g. start and stop)
@@ -40,10 +40,10 @@ After the compilation is successful, the installation package will be generated
- JDK (above 1.8.0_141), [How to install JDK](https://www.runoob.com/java/java-environment-setup.html)
### 3.2 Linkis and DSS environments
-- The execution of Streamis depends on Linkis, and it needs to be version 1.1.1 and above, so you need to install Linkis above 1.1.1 and ensure that the Flink engine can be used normally.Some functions need to be supported by linkis-1.1.2.
-- Datasphere studio (> =1.1.0), the development and debugging of streaming jobs depend on DSS scriptis, and the streaming production center needs to be embedded in the DSS engineering framework system, so it depends on * * dss-1.1.0 * * and above.
+- Streamis depends on Linkis for execution and requires version 1.4.0 or above, so you need to install Linkis 1.4.0+ and ensure that the Flink engine can be used normally. Some functions require linkis-1.4.0.
+- DataSphereStudio (>= 1.1.2): the development and debugging of streaming jobs depend on DSS Scriptis, and the streaming production center needs to be embedded into the DSS project framework, so Streamis depends on **dss-1.1.2** and above.
-Before the formal installation of streamis, please install linkis-1.1.1 and dss-1.1.0 or above, and ensure that the linkis Flink engine and DSS can be used normally. For the installation of DSS and linkis, please refer to the [dss & linkis one click installation and deployment document](https://github.com/WeBankFinTech/DataSphereStudio-Doc/blob/main/zh_CN/%E5%AE%89%E8%A3%85%E9%83%A8%E7%BD%B2/DSS%E5%8D%95%E6%9C%BA%E9%83%A8%E7%BD%B2%E6%96%87%E6%A1%A3.md).
+Before formally installing Streamis, please install linkis-1.4.0 and dss-1.1.2 or above, and ensure that the Linkis Flink engine and DSS can be used normally. For the installation of DSS and Linkis, please refer to the [DSS & Linkis one-click installation and deployment document](https://github.com/WeBankFinTech/DataSphereStudio-Doc/blob/main/zh_CN/%E5%AE%89%E8%A3%85%E9%83%A8%E7%BD%B2/DSS%E5%8D%95%E6%9C%BA%E9%83%A8%E7%BD%B2%E6%96%87%E6%A1%A3.md).
How to verify that DSS and linkis are basically available? You can create a flinksql script on DSS scriptis and execute it. If flinksql can execute correctly and return the result set, it means that the DSS and linkis environments are available.
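A minimal smoke test of that kind might look like the following (illustrative only; datagen is a standard Flink SQL connector, and the table name is made up):

```sql
-- Generates a few rows locally, so no external storage is needed.
CREATE TABLE demo_source (
    id INT,
    name STRING
) WITH (
    'connector' = 'datagen',
    'rows-per-second' = '1',
    'number-of-rows' = '10'
);

SELECT id, name FROM demo_source;
```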
@@ -71,9 +71,47 @@ vi conf/db.sh
```bash
vi conf/config.sh
-#Configure service port information
-#Configure Linkis service information
```
+```shell script
+### deploy user
+deployUser=hadoop
+
+### ssh port
+SSH_PORT=22
+
+##The Max Heap size for the JVM
+SERVER_HEAP_SIZE="512M"
+
+##The Port of Streamis
+STREAMIS_PORT=9400
+
+### The install home path of STREAMIS; must be provided
+STREAMIS_INSTALL_HOME=/appcom/Install/streamis
+
+### Linkis EUREKA information (the microservice registration and discovery center)
+EUREKA_INSTALL_IP=127.0.0.1
+EUREKA_PORT=20303
+
+### Linkis Gateway information
+GATEWAY_INSTALL_IP=127.0.0.1
+GATEWAY_PORT=9001
+
+################### The install Configuration of all Micro-Services #####################
+#
+# NOTICE:
+# 1. If you just want to try it out, the following micro-service configuration can be left as it is;
+# these services will be installed on this machine by default.
+# 2. To get the most complete enterprise-level features, we strongly recommend that you configure
+# the following microservice parameters.
+#
+
+STREAMIS_SERVER_INSTALL_IP=127.0.0.1
+STREAMIS_SERVER_INSTALL_PORT=9400
+
+STREAMIS_VERSION=0.3.0
+STREAMIS_FILE_NAME="STREAMIS-$STREAMIS_VERSION"
+```
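conf/db.sh (edited in the previous step) takes the matching database connection settings; an illustrative sketch with placeholder values only:

```shell script
### for Streamis-Server (placeholder values)
MYSQL_HOST=127.0.0.1
MYSQL_PORT=3306
MYSQL_DB=streamis
MYSQL_USER=streamis
MYSQL_PASSWORD=changeit
```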
+
4.Installation
```bash
sh bin/install.sh
@@ -156,10 +194,10 @@ sudo nginx -s reload
## 5. Access to DSS
-If you want to use the streamis0.2.0 front end normally, you also need to install the DSS StreamisAppConn plug-in. Please refer to: [StreamisAppConn plug-in installation document](development/StreamisAppConnInstallationDocument.md)
+If you want to use the Streamis 0.3.0 front end normally, you also need to install the DSS StreamisAppConn plugin. Please refer to: [StreamisAppConn plugin installation document](development/StreamisAppConnInstallationDocument.md)
## 6. Linkis Flink engine compilation and installation
-If you want to run streamis0.2.0 normally, you also need to install the linkis Flink engine. Please refer to: [linkis Flink engine installation document](https://linkis.apache.org/zh-CN/docs/1.1.2/engine_usage/flink/)
+If you want to run Streamis 0.3.0 normally, you also need to install the Linkis Flink engine. Please refer to: [Linkis Flink engine installation document](https://linkis.apache.org/zh-CN/docs/1.1.2/engine_usage/flink/)
## 7. Streamis component upgrade document / script
-If you want to upgrade from a lower version of streamis to streamis0.2.0, please refer to: [streamis upgrade document](development/StreamisUpgradeDocumentation.md)
+If you want to upgrade from a lower version of Streamis to Streamis 0.3.0, please refer to: [Streamis upgrade document](development/StreamisUpgradeDocumentation.md)
diff --git a/docs/en_US/0.3.0/StreamisUpgrade.md b/docs/en_US/0.3.0/StreamisUpgrade.md
new file mode 100644
index 000000000..d9a821e71
--- /dev/null
+++ b/docs/en_US/0.3.0/StreamisUpgrade.md
@@ -0,0 +1,59 @@
+# Streamis Configuration and Script Upgrade Documentation
+
+## Configuration changes
+
+Not involved.
+
+## SQL changes
+
+Structure changes to two data tables and a data change to one data table have been added to the version branch and are included in the release package.
+
+![](../../images/0.3.0/upgrade/upgrade-to-0.3.0.png)
+
+### Base script changes
+
+#### 1. streamis_ddl.sql
+
+```sql
+# Add two fields update_time and md5 to the linkis_stream_project_files table
+ CREATE TABLE `linkis_stream_project_files` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `file_name` varchar(500) DEFAULT NULL,
+ `version` varchar(30) DEFAULT NULL COMMENT '文件版本号,由用户上传时指定的',
+ `store_path` varchar(100) DEFAULT NULL COMMENT '如:{"resource":"22edar22", "version": "v0001"}',
+ `store_type` varchar(20) DEFAULT NULL COMMENT '存储类型,一般就是bml',
+ `project_name` varchar(50) DEFAULT NULL,
+ `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+ `create_by` varchar(32) DEFAULT NULL,
+ `comment` varchar(255) DEFAULT NULL COMMENT '说明',
+ `update_time` datetime DEFAULT NULL,
+ `md5` varchar(100) DEFAULT NULL COMMENT '文件md5',
+ PRIMARY KEY (`id`) USING BTREE
+ ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='项目表';
+
+```
+
+#### 2. streamis_dml.sql
+
+Not involved.
+
+### Upgrade scripts
+
+#### 1. DDL upgrade script
+
+```sql
+ALTER TABLE linkis_stream_project_files ADD update_time datetime NULL;
+ALTER TABLE linkis_stream_project_files ADD md5 varchar(100) NULL;
+```
+
+#### 2. DML upgrade script
+
+```sql
+UPDATE linkis_stream_job_config_def
+SET `key`='wds.linkis.flink.alert.failure.user', name='失败时告警用户', `type`='INPUT', sort=0, description='失败时告警用户', validate_type='None', validate_rule=NULL, `style`='', visiable=1, `level`=1, unit=NULL, default_value='', ref_values='', parent_ref=8, required=0, is_temp=0
+WHERE id=16;
+
+INSERT INTO `linkis_stream_job_config_def` VALUES (38, 'linkis.ec.app.manage.mode', '管理模式', 'SELECT', 3, 'EngineConn管理模式', 'None', NULL, '', 1, 1, NULL, 'attach', 'detach,attach', 8, 0, 0);
+
+```
+
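An illustrative way to confirm the DML upgrade took effect (not part of the shipped scripts):

```sql
-- id 16 should now be an INPUT definition and id 38 (manage mode) should exist
SELECT id, `key`, `type`, default_value, ref_values
FROM linkis_stream_job_config_def
WHERE id IN (16, 38);
```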
diff --git a/docs/en_US/0.3.0/StreamisVersionFeatures.md b/docs/en_US/0.3.0/StreamisVersionFeatures.md
new file mode 100644
index 000000000..47e8f9050
--- /dev/null
+++ b/docs/en_US/0.3.0/StreamisVersionFeatures.md
@@ -0,0 +1,63 @@
+### 1. Separate (detached) mode refactoring
+Version 0.3.0 refactors the detached Flink engine feature and supports detached submission. With detached submission, the linkis flink engine automatically exits and releases its JVM resources once the yarn Flink application has been created successfully, while Streamis continues to manage the lifecycle of the Flink application on yarn.
+![1.1](../../images/版本功能介绍/1.1.png)
+
+From 0.3.0 onwards, jobs are non-detached by default. To switch to detached mode, there are two ways:
+###### ① When importing production parameters, add the configuration parameter linkis.ec.app.manage.mode with the value detach (detached) or attach (non-detached, the default):
+```
+"wds.linkis.flink.produce": {
+ "wds.linkis.flink.checkpoint.switch":"ON",
+ "wds.linkis.flink.alert.failure.user":"",
+ "wds.linkis.flink.app.fail-restart.switch":"OFF",
+ "wds.linkis.flink.app.start-auto-restore.switch":"OFF",
+ "linkis.ec.app.manage.mode": "detach"
+ }
+```
+
+###### ② On the task configuration page, change the management mode to detach
+![1.2](../../images/版本功能介绍/1.2.png)
+
+
+
+
+### 2. Label filtering
+Supports filtering tasks by label
+
+![2.1](../../images/版本功能介绍/2.1.png)
+
+### 3. Batch label modification
+
+First click batch modification, then select multiple tasks and click Modify Label. Enter the new label in the pop-up window; labels may contain uppercase and lowercase letters, digits, commas, and underscores (a sketch of this character-set rule follows the screenshots below).
+
+![3.1](../../images/版本功能介绍/3.1.png)
+![3.2](../../images/版本功能介绍/3.2.png)
+
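A sketch of that character-set rule as a shell check (the regex is an assumption inferred from the sentence above, not taken from the Streamis source):

```bash
# Accept only upper/lower-case letters, digits, commas and underscores
label="stream_etl,prod_2"
if [[ "$label" =~ ^[A-Za-z0-9_,]+$ ]]; then
    echo "valid label: $label"
else
    echo "invalid label: $label"
fi
```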
+
+### 4. Uploaded job zip packages are cleaned up automatically
+After a job is imported, the zip cache on the server is automatically cleared
+
+### 5. The failure alert user is checked before a job starts
+
+The failure alert user must be a non-hduser user; otherwise the job cannot be started.
+You can configure the alert user when importing a job, or on the job configuration page:
+###### ① When importing the job, configure the alert user in the production configuration via wds.linkis.flink.alert.failure.user:
+```
+"wds.linkis.flink.produce": {
+ "wds.linkis.flink.checkpoint.switch":"ON",
+ "wds.linkis.flink.alert.failure.user":"",
+ "wds.linkis.flink.app.fail-restart.switch":"OFF",
+ "wds.linkis.flink.app.start-auto-restore.switch":"OFF",
+ "linkis.ec.app.manage.mode": "detach"
+ }
+```
+
+### 6. Uploaded project resource files now record an md5
+On the project resource management page, each uploaded file shows an update time and an md5 value, and the md5 value is consistent with the result of the Linux command line (see the sketch after the screenshots below)
+
+![6.1](../../images/版本功能介绍/6.1.png)
+![6.2](../../images/版本功能介绍/6.2.png)
+
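In other words, the value shown on the page should match a local md5sum (file name illustrative):

```bash
# Compute the md5 of a resource file locally; the resource management page
# should display the same value after the upload completes.
md5sum test.jar
```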
+### 7. When starting a job, yarn is automatically checked for an application with the same name
+Click the application name to jump to the yarn page (a manual command-line version of this check is sketched below)
+![7.1](../../images/版本功能介绍/7.1.png)
+
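A manual command-line version of the same check could look like this (illustrative; assumes a configured YARN client and that you know the job's application name):

```bash
# List running YARN applications and look for one with the job's name
yarn application -list -appStates RUNNING | grep "<application-name>"
```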
diff --git a/docs/en_US/0.2.0/architecture/StreamisAppConnDesignDocument.md b/docs/en_US/0.3.0/architecture/StreamisAppConnDesignDocument.md
similarity index 100%
rename from docs/en_US/0.2.0/architecture/StreamisAppConnDesignDocument.md
rename to docs/en_US/0.3.0/architecture/StreamisAppConnDesignDocument.md
diff --git a/docs/en_US/0.2.0/architecture/StreamisAuthenticationDesignDocument.md b/docs/en_US/0.3.0/architecture/StreamisAuthenticationDesignDocument.md
similarity index 100%
rename from docs/en_US/0.2.0/architecture/StreamisAuthenticationDesignDocument.md
rename to docs/en_US/0.3.0/architecture/StreamisAuthenticationDesignDocument.md
diff --git a/docs/en_US/0.2.0/development/StreamisAppConnInstallationDocument.md b/docs/en_US/0.3.0/development/StreamisAppConnInstallationDocument.md
similarity index 96%
rename from docs/en_US/0.2.0/development/StreamisAppConnInstallationDocument.md
rename to docs/en_US/0.3.0/development/StreamisAppConnInstallationDocument.md
index dcee32288..e19a371db 100644
--- a/docs/en_US/0.2.0/development/StreamisAppConnInstallationDocument.md
+++ b/docs/en_US/0.3.0/development/StreamisAppConnInstallationDocument.md
@@ -1,7 +1,7 @@
-Streamisappconn installation document this article mainly introduces the deployment, configuration and installation of streamisappconn in DSS (datasphere studio) 1.1.0
+StreamisAppConn installation document. This article mainly introduces the deployment, configuration and installation of StreamisAppConn in DSS (DataSphere Studio) 1.1.2
# 1. Preparation for deploying streamisappconn
-Before deploying streamisappconn, please complete the installation of streamis0.2.0 and other related components, and ensure that the basic functions of the project are available.
+Before deploying StreamisAppConn, please complete the installation of Streamis 0.3.0 and other related components, and ensure that the basic functions of the project are available.
# 2. Download and compilation of streamisappconn plug-in
1) Download binary package
diff --git a/docs/en_US/0.2.0/development/StreamisUpgradeDocumentation.md b/docs/en_US/0.3.0/development/StreamisUpgradeDocumentation.md
similarity index 88%
rename from docs/en_US/0.2.0/development/StreamisUpgradeDocumentation.md
rename to docs/en_US/0.3.0/development/StreamisUpgradeDocumentation.md
index db816d7ac..111b37edd 100644
--- a/docs/en_US/0.2.0/development/StreamisUpgradeDocumentation.md
+++ b/docs/en_US/0.3.0/development/StreamisUpgradeDocumentation.md
@@ -1,7 +1,7 @@
-Streamis upgrade document. This article mainly introduces the upgrade steps of adapting DSS1.1.0 and linkis1.1.1 based on the original installation of Streamis service. The biggest difference between Streamis 0.2.0 and Streamis 0.1.0 is that it accesses DSS appconn and optimizes the start and stop of jobs.
+Streamis upgrade document. This article mainly introduces the upgrade steps for adapting to DSS 1.1.2 and Linkis 1.4.0 on top of an existing Streamis installation. The biggest difference between Streamis 0.3.0 and Streamis 0.1.0 is that it integrates with the DSS AppConn and optimizes job start and stop.
# 1. Work before upgrading streamis
-Before upgrading Streamis, please install linkis1.1.1 and DSS1.1.0 or above, and ensure that the linkis Flink engine and DSS can be used normally. For the installation of DSS and linkis, please refer to [dss & linkis one click installation and deployment document](https://github.com/WeBankFinTech/DataSphereStudio-Doc/blob/main/zh_CN/%E5%AE%89%E8%A3%85%E9%83%A8%E7%BD%B2/DSS%E5%8D%95%E6%9C%BA%E9%83%A8%E7%BD%B2%E6%96%87%E6%A1%A3.md).
+Before upgrading Streamis, please install Linkis 1.4.0 and DSS 1.1.2 or above, and ensure that the Linkis Flink engine and DSS can be used normally. For the installation of DSS and Linkis, please refer to the [DSS & Linkis one-click installation and deployment document](https://github.com/WeBankFinTech/DataSphereStudio-Doc/blob/main/zh_CN/%E5%AE%89%E8%A3%85%E9%83%A8%E7%BD%B2/DSS%E5%8D%95%E6%9C%BA%E9%83%A8%E7%BD%B2%E6%96%87%E6%A1%A3.md).
# 2. Streamis upgrade steps
diff --git a/docs/en_US/userManual/StreamisUserManual.md b/docs/en_US/userManual/StreamisUserManual.md
index 974a27651..8bbc8dead 100644
--- a/docs/en_US/userManual/StreamisUserManual.md
+++ b/docs/en_US/userManual/StreamisUserManual.md
@@ -2,7 +2,7 @@
## 1. Preface
- This article is a quick start document for Streamis 0.2.0, which covers the basic usage process of Stremis. More details on operation and usage will be provided in the user documentation.
+ This article is a quick start document for Streamis 0.3.0, which covers the basic usage flow of Streamis. More operation and usage details will be provided in the user documentation.
## 2. Streamis entrance
diff --git a/docs/images/0.3.0/upgrade/upgrade-to-0.3.0.png b/docs/images/0.3.0/upgrade/upgrade-to-0.3.0.png
new file mode 100644
index 000000000..78fe2ede4
Binary files /dev/null and b/docs/images/0.3.0/upgrade/upgrade-to-0.3.0.png differ
diff --git a/docs/images/Upgrade2.png b/docs/images/Upgrade2.png
new file mode 100644
index 000000000..91320c26a
Binary files /dev/null and b/docs/images/Upgrade2.png differ
diff --git a/docs/images/default_config1.png b/docs/images/default_config1.png
new file mode 100644
index 000000000..dfeb8e3a5
Binary files /dev/null and b/docs/images/default_config1.png differ
diff --git a/docs/images/default_config2.png b/docs/images/default_config2.png
new file mode 100644
index 000000000..db6eb2896
Binary files /dev/null and b/docs/images/default_config2.png differ
diff --git a/docs/images/flinkjar.png b/docs/images/flinkjar.png
new file mode 100644
index 000000000..ab136a118
Binary files /dev/null and b/docs/images/flinkjar.png differ
diff --git a/docs/images/flinkjar_metajson.png b/docs/images/flinkjar_metajson.png
new file mode 100644
index 000000000..651d18878
Binary files /dev/null and b/docs/images/flinkjar_metajson.png differ
diff --git a/docs/images/flinkjar_zip.png b/docs/images/flinkjar_zip.png
new file mode 100644
index 000000000..5a391e879
Binary files /dev/null and b/docs/images/flinkjar_zip.png differ
diff --git a/docs/images/flinksql_job_use_demo3.png b/docs/images/flinksql_job_use_demo3.png
new file mode 100644
index 000000000..d23f6984b
Binary files /dev/null and b/docs/images/flinksql_job_use_demo3.png differ
diff --git a/docs/images/inspect1.png b/docs/images/inspect1.png
new file mode 100644
index 000000000..17dfbea9d
Binary files /dev/null and b/docs/images/inspect1.png differ
diff --git a/docs/images/job_import.png b/docs/images/job_import.png
new file mode 100644
index 000000000..2031a6f07
Binary files /dev/null and b/docs/images/job_import.png differ
diff --git a/docs/images/job_log1.png b/docs/images/job_log1.png
new file mode 100644
index 000000000..7c42453bc
Binary files /dev/null and b/docs/images/job_log1.png differ
diff --git a/docs/images/job_log2.png b/docs/images/job_log2.png
new file mode 100644
index 000000000..e19bfc511
Binary files /dev/null and b/docs/images/job_log2.png differ
diff --git a/docs/images/job_resource1.png b/docs/images/job_resource1.png
new file mode 100644
index 000000000..a5a5b7676
Binary files /dev/null and b/docs/images/job_resource1.png differ
diff --git a/docs/images/job_resource2.png b/docs/images/job_resource2.png
new file mode 100644
index 000000000..cd494b22f
Binary files /dev/null and b/docs/images/job_resource2.png differ
diff --git a/docs/images/job_resource3.png b/docs/images/job_resource3.png
new file mode 100644
index 000000000..489edcd85
Binary files /dev/null and b/docs/images/job_resource3.png differ
diff --git a/docs/images/job_start1.png b/docs/images/job_start1.png
new file mode 100644
index 000000000..49ec50077
Binary files /dev/null and b/docs/images/job_start1.png differ
diff --git a/docs/images/job_start2.png b/docs/images/job_start2.png
new file mode 100644
index 000000000..0cc83c020
Binary files /dev/null and b/docs/images/job_start2.png differ
diff --git a/docs/images/job_stop1.png b/docs/images/job_stop1.png
new file mode 100644
index 000000000..f3026af43
Binary files /dev/null and b/docs/images/job_stop1.png differ
diff --git a/docs/images/savepoint1.png b/docs/images/savepoint1.png
new file mode 100644
index 000000000..e7b6bb4de
Binary files /dev/null and b/docs/images/savepoint1.png differ
diff --git a/docs/images/savepoint2.png b/docs/images/savepoint2.png
new file mode 100644
index 000000000..023ea5b34
Binary files /dev/null and b/docs/images/savepoint2.png differ
diff --git a/docs/images/update_sql.png b/docs/images/update_sql.png
new file mode 100644
index 000000000..39d58bc8c
Binary files /dev/null and b/docs/images/update_sql.png differ
diff --git a/docs/images/update_version1.png b/docs/images/update_version1.png
new file mode 100644
index 000000000..fb62806e3
Binary files /dev/null and b/docs/images/update_version1.png differ
diff --git a/docs/images/update_version2.png b/docs/images/update_version2.png
new file mode 100644
index 000000000..cea7eb7e7
Binary files /dev/null and b/docs/images/update_version2.png differ
diff --git a/docs/images/update_version3.png b/docs/images/update_version3.png
new file mode 100644
index 000000000..061bf45db
Binary files /dev/null and b/docs/images/update_version3.png differ
diff --git a/docs/images/update_version4.png b/docs/images/update_version4.png
new file mode 100644
index 000000000..a02e8886b
Binary files /dev/null and b/docs/images/update_version4.png differ
diff --git a/docs/images/update_version5.png b/docs/images/update_version5.png
new file mode 100644
index 000000000..50ccb193d
Binary files /dev/null and b/docs/images/update_version5.png differ
diff --git a/docs/images/upgrade1.png b/docs/images/upgrade1.png
new file mode 100644
index 000000000..bfcaa3cc3
Binary files /dev/null and b/docs/images/upgrade1.png differ
diff --git a/docs/images/upgrade3.png b/docs/images/upgrade3.png
new file mode 100644
index 000000000..11ee04c30
Binary files /dev/null and b/docs/images/upgrade3.png differ
diff --git a/docs/images/upgrade4.png b/docs/images/upgrade4.png
new file mode 100644
index 000000000..668da6637
Binary files /dev/null and b/docs/images/upgrade4.png differ
diff --git a/docs/images/upgrade5.png b/docs/images/upgrade5.png
new file mode 100644
index 000000000..682150d47
Binary files /dev/null and b/docs/images/upgrade5.png differ
diff --git a/docs/images/upgrade6.png b/docs/images/upgrade6.png
new file mode 100644
index 000000000..1bd933424
Binary files /dev/null and b/docs/images/upgrade6.png differ
diff --git a/docs/images/upgrade7.png b/docs/images/upgrade7.png
new file mode 100644
index 000000000..83fcaa437
Binary files /dev/null and b/docs/images/upgrade7.png differ
diff --git a/docs/images/upgrade8.png b/docs/images/upgrade8.png
new file mode 100644
index 000000000..e4e7bc247
Binary files /dev/null and b/docs/images/upgrade8.png differ
diff --git "a/docs/images/\347\211\210\346\234\254\345\212\237\350\203\275\344\273\213\347\273\215/1.1.png" "b/docs/images/\347\211\210\346\234\254\345\212\237\350\203\275\344\273\213\347\273\215/1.1.png"
new file mode 100644
index 000000000..86fa71847
Binary files /dev/null and "b/docs/images/\347\211\210\346\234\254\345\212\237\350\203\275\344\273\213\347\273\215/1.1.png" differ
diff --git "a/docs/images/\347\211\210\346\234\254\345\212\237\350\203\275\344\273\213\347\273\215/1.2.png" "b/docs/images/\347\211\210\346\234\254\345\212\237\350\203\275\344\273\213\347\273\215/1.2.png"
new file mode 100644
index 000000000..6838087e8
Binary files /dev/null and "b/docs/images/\347\211\210\346\234\254\345\212\237\350\203\275\344\273\213\347\273\215/1.2.png" differ
diff --git "a/docs/images/\347\211\210\346\234\254\345\212\237\350\203\275\344\273\213\347\273\215/2.1.png" "b/docs/images/\347\211\210\346\234\254\345\212\237\350\203\275\344\273\213\347\273\215/2.1.png"
new file mode 100644
index 000000000..cd923a34a
Binary files /dev/null and "b/docs/images/\347\211\210\346\234\254\345\212\237\350\203\275\344\273\213\347\273\215/2.1.png" differ
diff --git "a/docs/images/\347\211\210\346\234\254\345\212\237\350\203\275\344\273\213\347\273\215/3.1.png" "b/docs/images/\347\211\210\346\234\254\345\212\237\350\203\275\344\273\213\347\273\215/3.1.png"
new file mode 100644
index 000000000..d7bb10d1a
Binary files /dev/null and "b/docs/images/\347\211\210\346\234\254\345\212\237\350\203\275\344\273\213\347\273\215/3.1.png" differ
diff --git "a/docs/images/\347\211\210\346\234\254\345\212\237\350\203\275\344\273\213\347\273\215/3.2.png" "b/docs/images/\347\211\210\346\234\254\345\212\237\350\203\275\344\273\213\347\273\215/3.2.png"
new file mode 100644
index 000000000..eff0db71a
Binary files /dev/null and "b/docs/images/\347\211\210\346\234\254\345\212\237\350\203\275\344\273\213\347\273\215/3.2.png" differ
diff --git "a/docs/images/\347\211\210\346\234\254\345\212\237\350\203\275\344\273\213\347\273\215/6.1.png" "b/docs/images/\347\211\210\346\234\254\345\212\237\350\203\275\344\273\213\347\273\215/6.1.png"
new file mode 100644
index 000000000..3018dff6b
Binary files /dev/null and "b/docs/images/\347\211\210\346\234\254\345\212\237\350\203\275\344\273\213\347\273\215/6.1.png" differ
diff --git "a/docs/images/\347\211\210\346\234\254\345\212\237\350\203\275\344\273\213\347\273\215/6.2.png" "b/docs/images/\347\211\210\346\234\254\345\212\237\350\203\275\344\273\213\347\273\215/6.2.png"
new file mode 100644
index 000000000..61fe6f53a
Binary files /dev/null and "b/docs/images/\347\211\210\346\234\254\345\212\237\350\203\275\344\273\213\347\273\215/6.2.png" differ
diff --git "a/docs/images/\347\211\210\346\234\254\345\212\237\350\203\275\344\273\213\347\273\215/7.1.png" "b/docs/images/\347\211\210\346\234\254\345\212\237\350\203\275\344\273\213\347\273\215/7.1.png"
new file mode 100644
index 000000000..dd436f5a2
Binary files /dev/null and "b/docs/images/\347\211\210\346\234\254\345\212\237\350\203\275\344\273\213\347\273\215/7.1.png" differ
diff --git "a/docs/zh_CN/0.2.0/development/Streamis\345\215\207\347\272\247\346\226\207\346\241\243.md" "b/docs/zh_CN/0.2.0/development/Streamis\345\215\207\347\272\247\346\226\207\346\241\243.md"
deleted file mode 100644
index cf5a89b86..000000000
--- "a/docs/zh_CN/0.2.0/development/Streamis\345\215\207\347\272\247\346\226\207\346\241\243.md"
+++ /dev/null
@@ -1,45 +0,0 @@
-Streamis upgrade document. This article mainly introduces the upgrade steps for adapting to DSS1.1.0 and Linkis1.1.1 on top of an existing Streamis installation. The biggest difference between Streamis0.2.0 and Streamis0.1.0 is that it integrates with the DSS AppConn and optimizes job start and stop.
-
-# 1. Work before upgrading Streamis
-Before upgrading Streamis, please install Linkis1.1.1 and DSS1.1.0 or above, and ensure that the Linkis Flink engine and DSS can be used normally. For the installation of DSS and Linkis, refer to the [DSS & Linkis one-click installation and deployment document](https://github.com/WeBankFinTech/DataSphereStudio-Doc/blob/main/zh_CN/%E5%AE%89%E8%A3%85%E9%83%A8%E7%BD%B2/DSS%E5%8D%95%E6%9C%BA%E9%83%A8%E7%BD%B2%E6%96%87%E6%A1%A3.md).
-
-# 2. Streamis upgrade steps
-
-## Install StreamisAppConn
-
-1) Delete the old StreamisAppConn package
-
-Enter the following directory, find the streamis appconn folder and delete it if it exists:
-```shell script
-{DSS_Install_HOME}/dss/dss-appconns
-```
-
-2) Install and deploy StreamisAppConn
-
-To install the DSS StreamisAppConn plugin, please refer to: [StreamisAppConn plugin installation document](development/StreamisAppConn安装文档.md)
-
-## Install the Streamis backend
-Update `streamis-server/lib` under the Streamis installation directory with the lib from the new package; the files under `streamis-server/conf` can be updated as needed.
-
-Enter the installation directory and run the upgrade script to update the database table structures and data:
-```shell script
-cd {Streamis_Install_HOME}
-sh bin/upgrade.sh
-```
-
-Then restart the Streamis Server with the following commands to complete the update:
-```shell script
-cd {Streamis_Install_HOME}/streamis-server
-sh bin/stop-streamis-server.sh
-sh bin/start-streamis-server.sh
-```
-
-## Install the Streamis front end
-First delete the old front-end directory, then replace it with the new front-end package
-```
-mkdir ${STREAMIS_FRONT_PATH}
-cd ${STREAMIS_FRONT_PATH}
-#1. Delete the old front-end directory
-#2. Put the new front-end package in place
-unzip streamis-${streamis-version}.zip
-```
\ No newline at end of file
diff --git "a/docs/zh_CN/0.2.0/\344\275\277\347\224\250\346\226\207\346\241\243/Streamis\347\224\250\346\210\267\346\211\213\345\206\214.md" "b/docs/zh_CN/0.2.0/\344\275\277\347\224\250\346\226\207\346\241\243/Streamis\347\224\250\346\210\267\346\211\213\345\206\214.md"
deleted file mode 100644
index 864178179..000000000
--- "a/docs/zh_CN/0.2.0/\344\275\277\347\224\250\346\226\207\346\241\243/Streamis\347\224\250\346\210\267\346\211\213\345\206\214.md"
+++ /dev/null
@@ -1,209 +0,0 @@
-# Streamis Quick Start
-
-## 1. Preface
-
- This article is a quick start document for Streamis 0.2.0, which covers the basic usage flow of Streamis. More operation and usage details will be provided in the user documentation.
-
-
-## 2. Integrating Streamis into DSS
-
- For ease of use, **the Streamis system is embedded into DSS as a DSS component**
-
-##### 2.1 **How to integrate?**
-
-After installing and deploying StreamisAppConn according to the [StreamisAppConn installation document](../development/StreamisAppConn安装文档.md), the Streamis system is automatically embedded into DSS.
-
-##### 2.2 How to verify that DSS has successfully integrated Streamis?
-
-Enter the DSS project home page and create a project
-
-![image-20211230173334826](../../../images/create_stream_product_center.png)
-
-Enter the project and click the button in the upper left corner to switch to the "Streaming Production Center". If the Streamis home page appears, DSS has successfully integrated Streamis, as shown below:
-
-![image-20211230173839138](../../../images/stream_product_center.png)
-
-
-## 3. Core metrics
-
-The upper part of the Streamis home page shows the core metrics.
-
-The core metrics summarize the statuses of the Flink jobs uploaded to this project that the current user can see. There are 9 statuses for now; each entry shows the status name and the number of jobs in that status, as shown below.
-
-![Core metrics](../../../images/home_page.png)
-
-
-Figure 3.1 Home page core metrics
-
-# 4. Job example
-
- This demonstrates the whole flow from developing and debugging FlinkSQL in Scriptis to publishing to Streamis.
-
-## 4.1. Developing SQL in Scriptis
-
- Create a script file from the top Scriptis menu and select Flink as the script type, as shown below:
-
-![Enter FlinkSQL](../../../images/enter_flinksql.png)
-
-![create_script_file.png](../../../images/create_script_file.png)
-
-Write the FlinkSQL: source, sink, transform, etc.
-
-![flinksql_script_file](../../../images/flinksql_script_file.png)
-
-Click Run to debug the script
-
-## 4.2. Publishing to Streamis
-
-### 4.2.1 Packaging a Streamis Job
-
- A streaming application material package is a zip package, built according to the Streamis packaging convention, that contains the metadata (the description of the streaming application), the application code, and the materials the application uses. The zip format is as follows:
-
-    xxx.zip
-    ├── meta.json
-    ├── test.sql
-    ├── test.jar
-    ├── file3
-
-meta.json holds the metadata of the StreamisJob, in the following format:
-
-```
-{
-    "projectName": "", # project name
-    "jobName": "", # job name
-    "jobType": "flink.sql", # currently only flink.sql and flink.jar are supported
-    "tags": "", # application tags
-    "description": "" # job description,
-    "jobContent": {
-        # the content differs per jobType; see below
-    }
-}
-```
-
-!!!!!**Note in particular:**
-
- **The projectName here must match the project name created in your DSS project; otherwise, when you import the zip package on the Streamis page, it will not be shown after refreshing the list, because the two projectNames differ**
-
-If jobType is "flink.sql", jobContent is:
-
-```
-{
-    "type": "" # file, bml or sql
-    "sql": "select 1",
-    "file": "test.sql",
-    "resourceId": "",
-    "version": ""
-}
-If type is "file", only the file field is read; if type is "sql", only the sql field is read; if type is "bml", only the resourceId and version fields are read.
-```
-
-If jobType is "flink.jar", jobContent is:
-
-```
-{
-    "main.class.jar": "", # string. The jar containing the main class, e.g. test.jar
-    "main.class": "", # the main class, e.g. com.webank.Test
-    "args": "", # arguments of the main class, i.e. the args of the main function, separated by spaces
-    "hdfs.jars": [], # dependent HDFS jars, e.g. hdfs:///user/hadoop/test1.jar
-    "dependency.jars": [], # dependent jars, e.g. test2.jar
-    "resources": [] # dependent resource files, e.g. test.properties
-}
-```
-
-### 4.2.2 Example
-
- streamisjobtest is the flinksql file, and meta.json is the job's metadata.
-
-![flinksql_job_use_demo](../../../images/flinksql_job_use_demo.png)
-
-
-
-![flinksql_job_use_demo2](../../../images/flinksql_job_use_demo2.png)
-
-Package the SQL file and meta.json into a single zip file. Note: only zip files are recognized; other formats such as rar and 7z cannot be used.
-
-If uploading the zip file fails with the error below, adjust the nginx configuration with `vi /etc/nginx/conf.d/streamis.conf` and add the `client_max_body_size` property, as shown below.
-![upload_jobtask_error](../../../images/upload_jobtask_error.png)
-![upload_jobtask_error_solve](../../../images/upload_jobtask_error_solve.png)
------
-
-Import the zip package in Streamis. After the import, the job status becomes "Not started", the version is incremented by 1 (a newly imported job starts from version 1), and the latest release time is updated to the current time.
-
-Click the job name, Configure, or the three vertical dots on the left (parameter configuration / alert configuration / run history / run logs) to enter the job details; click Start to run the job.
-
-Click Snapshot [savepoint] in the three vertical dots on the left to save a snapshot.
-
-![job_list](../../../images/job_list.png)
-
-Click Batch operations to select multiple jobs and restart them; a snapshot restart takes a snapshot before restarting, while a direct restart does not.
-
-![jobbulk_operate](../../../images/jobbulk_operate.png)
-
-####
-
-
-# 5. Streamis job details
-
-Click the job name to view the job details, including the running status, execution history, configuration, job content, alerts, and so on.
-
-## 5.1 Running status
-
-![stream_job_detail](../../../images/stream_job_detail.png)
-
-## 5.2 Execution history
-
-Open the execution history to view the job's past runs,
-
-History logs: only running jobs can view history logs.
-
-The history logs show the logs of the Flink engine started for the current job; you can search for key log lines by keyword, and click View latest logs to see the engine's most recent logs.
-
-![stream_job_history](../../../images/stream_job_history.png)
-
-## 5.3 Configuration
-
-Configure Flink resource parameters and checkpoint parameters for the Streamis job
-
-![image-20211231101503678](../../../images/stream_job_config_1.png)
-![image-20211231101503678](../../../images/stream_job_config_2.png)
-
-
-
-## 5.4 Job content
-
-
-
- The job content page has two different views depending on the job type: Flink Jar or Flink SQL.
-
-
-
-- **Flink Jar job content**
-
-![Job content](../../../images/stream_job_flinkjar_jobcontent.png)
-
- The Flink Jar job content shows the contents and parameters of the job's jar package, and also provides a download of the jar.
-
-
-
-- **Flink SQL job content**
-
-![Job content](../../../images/stream_job_flinksql_jobcontent.png)
-
- The Flink SQL job content shows the job's SQL statements.
-
-
-
-## 5.5 Entering the Yarn page
-
-For a running Streamis job, this button opens the yarn management page to view the running status of the Flink job.
-
-![image-20211231102020703](../../../images/image-20211231102020703.png)
-
-## 6 Project resource files
-Streamis home page - upper right of the core metrics - project resource files.
-Project resource files provide uploading and management of the resource files a project needs, as shown below:
-
-![project_source_file_list](../../../images/project_source_file_list.png)
-
-Upload a project file
-
-![project_source_file_import](../../../images/project_source_file_import.png)
diff --git a/docs/zh_CN/0.2.0/architecture/SUMMARY.md b/docs/zh_CN/architecture/SUMMARY.md
similarity index 100%
rename from docs/zh_CN/0.2.0/architecture/SUMMARY.md
rename to docs/zh_CN/architecture/SUMMARY.md
diff --git "a/docs/zh_CN/0.2.0/architecture/Streamis\346\216\245\345\205\245AppConn\350\256\276\350\256\241\346\226\207\346\241\243.md" "b/docs/zh_CN/architecture/Streamis\346\216\245\345\205\245AppConn\350\256\276\350\256\241\346\226\207\346\241\243.md"
similarity index 99%
rename from "docs/zh_CN/0.2.0/architecture/Streamis\346\216\245\345\205\245AppConn\350\256\276\350\256\241\346\226\207\346\241\243.md"
rename to "docs/zh_CN/architecture/Streamis\346\216\245\345\205\245AppConn\350\256\276\350\256\241\346\226\207\346\241\243.md"
index d8bf3122c..c28b1a0f4 100644
--- "a/docs/zh_CN/0.2.0/architecture/Streamis\346\216\245\345\205\245AppConn\350\256\276\350\256\241\346\226\207\346\241\243.md"
+++ "b/docs/zh_CN/architecture/Streamis\346\216\245\345\205\245AppConn\350\256\276\350\256\241\346\226\207\346\241\243.md"
@@ -1,7 +1,7 @@
# Streamis AppConn integration
## Overall flow chart
-![Streamis integration with DSS](../../../images/zh_CN/streamis_appconn.png)
+![Streamis integration with DSS](../../images/zh_CN/streamis_appconn.png)
## DSS project appconn plugin streamis-appconn
diff --git "a/docs/zh_CN/0.2.0/architecture/Streamis\351\211\264\346\235\203\350\256\276\350\256\241\346\226\207\346\241\243.md" "b/docs/zh_CN/architecture/Streamis\351\211\264\346\235\203\350\256\276\350\256\241\346\226\207\346\241\243.md"
similarity index 97%
rename from "docs/zh_CN/0.2.0/architecture/Streamis\351\211\264\346\235\203\350\256\276\350\256\241\346\226\207\346\241\243.md"
rename to "docs/zh_CN/architecture/Streamis\351\211\264\346\235\203\350\256\276\350\256\241\346\226\207\346\241\243.md"
index 02514a3e8..eee306524 100644
--- "a/docs/zh_CN/0.2.0/architecture/Streamis\351\211\264\346\235\203\350\256\276\350\256\241\346\226\207\346\241\243.md"
+++ "b/docs/zh_CN/architecture/Streamis\351\211\264\346\235\203\350\256\276\350\256\241\346\226\207\346\241\243.md"
@@ -2,7 +2,7 @@
## Authentication flow chart
Some Streamis modules that require authentication do not depend on the streamis-project-server module; REST interface calls are used to handle authentication in that case.
-![Streamis project authentication](../../../images/zh_CN/streamis_project_privilege.png)
+![Streamis project authentication](../../images/zh_CN/streamis_project_privilege.png)
## Implementation notes
The set of all privileges is obtained from the current user name and the project ID/name. If the set contains the RELEASE privilege, the user can release/edit/view; if it contains the EDIT privilege, the user can edit/view; if it contains the ACCESS privilege, the user can view;
diff --git a/docs/zh_CN/0.2.0/development/Interface_documentation/README.md b/docs/zh_CN/development/Interface_documentation/README.md
similarity index 100%
rename from docs/zh_CN/0.2.0/development/Interface_documentation/README.md
rename to docs/zh_CN/development/Interface_documentation/README.md
diff --git a/docs/zh_CN/0.2.0/development/Requirements_documentation/README.md b/docs/zh_CN/development/Requirements_documentation/README.md
similarity index 100%
rename from docs/zh_CN/0.2.0/development/Requirements_documentation/README.md
rename to docs/zh_CN/development/Requirements_documentation/README.md
diff --git a/docs/zh_CN/0.2.0/development/SUMMARY.md b/docs/zh_CN/development/SUMMARY.md
similarity index 100%
rename from docs/zh_CN/0.2.0/development/SUMMARY.md
rename to docs/zh_CN/development/SUMMARY.md
diff --git a/docs/zh_CN/0.2.0/development/Specification_documentation/Exception_Throws.md b/docs/zh_CN/development/Specification_documentation/Exception_Throws.md
similarity index 100%
rename from docs/zh_CN/0.2.0/development/Specification_documentation/Exception_Throws.md
rename to docs/zh_CN/development/Specification_documentation/Exception_Throws.md
diff --git a/docs/zh_CN/0.2.0/development/Specification_documentation/Log_out.md b/docs/zh_CN/development/Specification_documentation/Log_out.md
similarity index 100%
rename from docs/zh_CN/0.2.0/development/Specification_documentation/Log_out.md
rename to docs/zh_CN/development/Specification_documentation/Log_out.md
diff --git a/docs/zh_CN/0.2.0/development/Specification_documentation/README.md b/docs/zh_CN/development/Specification_documentation/README.md
similarity index 75%
rename from docs/zh_CN/0.2.0/development/Specification_documentation/README.md
rename to docs/zh_CN/development/Specification_documentation/README.md
index 1208ce0a0..8babbd331 100644
--- a/docs/zh_CN/0.2.0/development/Specification_documentation/README.md
+++ b/docs/zh_CN/development/Specification_documentation/README.md
@@ -6,9 +6,8 @@
## 2. Code submission conventions
-However, when the Release Notes are officially published, to keep them complete, each module owner should first file the issues according to the requirement documents and add them to [Project-0.2.0](https://github.com/WeBankFinTech/Streamis/projects/2).
+However, when the Release Notes are officially published, to keep them complete, each module owner should first file the issues according to the requirement documents and add them to the [Project list](https://github.com/WeBankFinTech/Streamis/projects?type=classic) of the version under development.
-Please note: Streamis-0.2.0 uses [Project-0.2.0](https://github.com/WeBankFinTech/Streamis/projects/2) as the DPMS tool to track and manage the progress of the version throughout.
## 2. Backend development conventions
diff --git "a/docs/zh_CN/0.2.0/development/StreamisAppConn\345\256\211\350\243\205\346\226\207\346\241\243.md" "b/docs/zh_CN/development/StreamisAppConn\345\256\211\350\243\205\346\226\207\346\241\243.md"
similarity index 93%
rename from "docs/zh_CN/0.2.0/development/StreamisAppConn\345\256\211\350\243\205\346\226\207\346\241\243.md"
rename to "docs/zh_CN/development/StreamisAppConn\345\256\211\350\243\205\346\226\207\346\241\243.md"
index acfd69636..5957daaee 100644
--- "a/docs/zh_CN/0.2.0/development/StreamisAppConn\345\256\211\350\243\205\346\226\207\346\241\243.md"
+++ "b/docs/zh_CN/development/StreamisAppConn\345\256\211\350\243\205\346\226\207\346\241\243.md"
@@ -1,7 +1,7 @@
-StreamisAppConn installation guide. This document mainly describes the deployment, configuration, and installation of StreamisAppConn in DSS (DataSphere Studio) 1.1.0
+StreamisAppConn installation guide. This document mainly describes the deployment, configuration, and installation of StreamisAppConn in DSS (DataSphere Studio) 1.1.2
# 1. Preparation for deploying StreamisAppConn
-Before deploying StreamisAppConn, please finish installing Streamis 0.2.0 and the other related components, and make sure the basic project functions are usable.
+Before deploying StreamisAppConn, please finish installing Streamis 0.3.0 and the other related components, and make sure the basic project functions are usable.
# 2. Downloading and compiling the StreamisAppConn plugin
1) Download the binary package
@@ -40,7 +40,7 @@ cd {DSS_INSTALL_HOME}/dss/bin
sh ./appconn-install.sh
# The script is an interactive installer: enter the string "streamis" plus the IP and port of the streamis service to complete the installation
# The streamis port here is the front-end port configured in nginx, not the back-end service port
-```
+ ```
## 4. After installing streamis-appconn, restart the DSS services to finish updating the plugin
### 4.1) Make the deployed AppConn take effect
@@ -53,7 +53,7 @@ sh ./dss-start-all.sh
After streamis-appconn is installed and deployed, the following steps give a first check that the installation succeeded.
Create a new project in the DSS workspace
-![Streamis project in the DSS workspace](../../../images/zh_CN/dss_streamis_project.png)
+![Streamis project in the DSS workspace](../../images/zh_CN/dss_streamis_project.png)
Check in the streamis database whether the project was created in sync; if the query returns a record, the AppConn is installed successfully
```sql
diff --git a/docs/zh_CN/0.2.0/development/Table_Structure_documentation/README.md b/docs/zh_CN/development/Table_Structure_documentation/README.md
similarity index 100%
rename from docs/zh_CN/0.2.0/development/Table_Structure_documentation/README.md
rename to docs/zh_CN/development/Table_Structure_documentation/README.md
diff --git a/docs/zh_CN/0.2.0/development/Table_Structure_documentation/db/streamis-jobmanager.sql b/docs/zh_CN/development/Table_Structure_documentation/db/streamis-jobmanager.sql
similarity index 100%
rename from docs/zh_CN/0.2.0/development/Table_Structure_documentation/db/streamis-jobmanager.sql
rename to docs/zh_CN/development/Table_Structure_documentation/db/streamis-jobmanager.sql
diff --git "a/docs/zh_CN/\344\275\277\347\224\250\346\226\207\346\241\243/Streamis\347\224\250\346\210\267\346\211\213\345\206\214.md" "b/docs/zh_CN/\344\275\277\347\224\250\346\226\207\346\241\243/Streamis\347\224\250\346\210\267\346\211\213\345\206\214.md"
new file mode 100644
index 000000000..515e8a5ac
--- /dev/null
+++ "b/docs/zh_CN/\344\275\277\347\224\250\346\226\207\346\241\243/Streamis\347\224\250\346\210\267\346\211\213\345\206\214.md"
@@ -0,0 +1,295 @@
+# Streamis Quick Start
+
+## 1. Preface
+
+This document is the quick start guide for Streamis 0.3.0. It covers the basic usage workflow of Streamis; more detailed usage instructions will be provided in the user documentation.
+
+
+## 2. Integrating Streamis into DSS
+
+For ease of use, **Streamis is embedded into DSS as a DSS component**
+
+### 2.1 **How to integrate?**
+
+After StreamisAppConn has been installed and deployed according to the [StreamisAppConn installation guide](../development/StreamisAppConn安装文档.md), Streamis is automatically embedded into DSS.
+
+### 2.2 How to verify that DSS has successfully integrated Streamis?
+
+Go to the DSS project home page and create a project
+
+![image-20211230173334826](../../images/create_stream_product_center.png)
+
+Enter the project and click the button in the upper-left corner to switch to "Stream Production Center". If the Streamis home page appears, DSS has successfully integrated Streamis, as shown below:
+
+![image-20211230173839138](../../images/stream_product_center.png)
+
+
+## 3. Core metrics
+
+The upper half of the Streamis home page shows the core metrics.
+
+The core metrics summarize the states of the Flink jobs uploaded to this project that the current user can see. There are 9 states for now; each state name is shown together with the number of jobs in that state, as in the figure below.
+
+![Core metrics](../../images/home_page.png)
+
+Figure 3.1 Core metrics on the home page
+## 4. Job examples
+
+This section mainly demonstrates the whole workflow from developing and debugging FlinkSQL in Scriptis to publishing it in Streamis.
+
+### 4.1. FlinkSQL job example
+
+#### 4.1.1. Developing SQL in Scriptis
+
+Create a script file from the Scriptis menu at the top and choose Flink as the script type, as shown below:
+
+![Enter FlinkSQL](../../images/enter_flinksql.png)
+
+![create_script_file.png](../../images/create_script_file.png)
+
+Write the FlinkSQL: source, sink, transform, and so on.
+
+![flinksql_script_file](../../images/flinksql_script_file.png)
+
+Click Run to debug the script
+
+#### 4.1.2. Package layout
+
+A streaming application material package is a zip archive, assembled according to the Streamis packaging conventions, that bundles the metadata (the description of the streaming application), the application code, and the materials the application uses. The zip layout is as follows:
+
+```
+xxx.zip
+ ├── meta.json
+ ├── streamisjobtest.sql
+ ├── test.jar
+ ├── file3
+```
+
+meta.json holds the job's metadata, and streamisjobtest.sql is the FlinkSQL file
+
+![flinksql_job_use_demo](../../images/flinksql_job_use_demo.png)
+
+![flinksql_job_use_demo3](../../images/flinksql_job_use_demo3.png)
+
+Package the SQL file and the meta.json file into one zip file. Note: only zip archives are recognized; other formats such as rar or 7z cannot be imported
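+
+For reference, a minimal command-line packaging sketch (the file names follow the example above and are illustrative; `zip` must be available):
+
+```bash
+# Package the job files at the top level of the archive (no extra parent directory)
+zip streamisjobtest.zip meta.json streamisjobtest.sql test.jar
+
+# Verify the layout before importing the archive into Streamis
+unzip -l streamisjobtest.zip
+```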
+
+### 4.2. FlinkJar job example
+
+#### 4.2.1. Developing the Flink application locally
+
+Develop the Flink application locally and package it as a jar
+
+![flinkJar](../../images/flinkJar.png)
+
+#### 4.2.2. Package layout
+
+As above, the streaming application material package is a zip archive, assembled according to the Streamis packaging conventions, that bundles the metadata (the description of the streaming application), the application code, and the materials the application uses. The zip layout is as follows:
+
+```
+xxx.zip
+ ├── meta.json
+ ├── resource.txt
+```
+
+meta.json holds the job's metadata, and resource.txt is a resource file used by the Flink application
+
+![flinkjar_zip](../../images/flinkjar_zip.png)
+
+![flinkjar_metajson](../../images/flinkjar_metajson.png)
+
+### 4.3. meta.json format in detail
+
+meta.json holds the metadata of a StreamisJob. Its format is:
+
+```
+{
+    "projectName": "",              # project name
+    "jobName": "",                  # job name
+    "jobType": "flink.sql",         # currently only flink.sql and flink.jar are supported
+    "tags": "",                     # application tags
+    "description": "",              # job description
+    "jobContent": {
+        # varies with jobType; see below for details
+    },
+    "jobConfig": {
+        "wds.linkis.flink.resource": {
+            "wds.linkis.flink.app.parallelism":"",                # parallelism
+            "wds.linkis.flink.jobmanager.memory":"",              # JobManager memory (MB)
+            "wds.linkis.flink.taskmanager.memory":"",             # TaskManager memory (MB)
+            "wds.linkis.flink.taskmanager.numberOfTaskSlots":"",  # TaskManager slot count
+            "wds.linkis.flink.taskmanager.cpus":"",               # TaskManager CPUs
+            "wds.linkis.rm.yarnqueue":"",                         # Yarn queue
+            "flink.client.memory":""                              # client memory
+        },
+        "wds.linkis.flink.custom": {
+            # Flink job parameters
+        },
+        "wds.linkis.flink.produce": {
+            "wds.linkis.flink.checkpoint.switch":"OFF",                 # checkpoint switch
+            "wds.linkis.flink.app.fail-restart.switch":"OFF",           # auto-restart on job failure
+            "wds.linkis.flink.app.start-auto-restore.switch":"OFF"      # auto-restore job state on start
+        }
+    }
+}
+```
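+
+Note that the `#` comments above are annotations for this document only; the real meta.json must be plain JSON. A quick way to sanity-check the file before zipping (assuming `jq` is installed):
+
+```bash
+# jq exits non-zero and prints the parse error if meta.json is not valid JSON
+jq . meta.json
+```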
+
+The jobConfig section is supported from version 0.3.0 on. It can be omitted, in which case the following defaults apply:
+
+```yaml
+"jobConfig": {
+ "wds.linkis.flink.resource": {
+ "wds.linkis.flink.app.parallelism":"4",
+ "wds.linkis.flink.jobmanager.memory":"2048",
+ "wds.linkis.flink.taskmanager.memory":"4096",
+ "wds.linkis.flink.taskmanager.numberOfTaskSlots":"2",
+ "wds.linkis.flink.taskmanager.cpus":"2"
+ },
+ "wds.linkis.flink.custom": {
+ "stream.log.filter.keywords":"ERROR,WARN,INFO",
+ "security.kerberos.krb5-conf.path":"",
+ "demo.log.tps.rate":"20000",
+ "classloader.resolve-order":"parent-first",
+ "stream.log.debug":"true",
+ "security.kerberos.login.contexts":"",
+ "security.kerberos.login.keytab":"",
+ "security.kerberos.login.principal":""
+ },
+ "wds.linkis.flink.produce": {
+ "wds.linkis.flink.checkpoint.switch":"OFF",
+ "wds.linkis.flink.app.fail-restart.switch":"OFF",
+ "wds.linkis.flink.app.start-auto-restore.switch":"OFF"
+ }
+ }
+```
+
+ ![default_config1](../../images/default_config1.png)
+
+ ![default_config2](../../images/default_config2.png)
+
+### 4.4. Publishing a job to Streamis
+
+#### 4.4.1. Uploading project resource files
+
+Go to the DSS page, create a project, enter it via "Stream Production Center", and click "Project resource files"
+
+![job_resource1](../../images/job_resource1.png)
+
+Click "Import" to import a resource package: choose the package from the file system and set a version number
+
+![job_resource2](../../images/job_resource2.png)
+
+Import finished
+
+![job_resource3](../../images/job_resource3.png)
+
+#### 4.4.2. Uploading the job zip package
+
+Go to the DSS page, create a project, enter it via "Stream Production Center", click "Import", and choose the packaged zip file from the file system to upload
+
+![job_import](../../images/job_import.png)
+
+If uploading the zip file fails with the error below, adjust the nginx configuration (`vi /etc/nginx/conf.d/streamis.conf`) and add the `client_max_body_size` property, as shown below.
+![upload_jobtask_error](../../images/upload_jobtask_error.png)
+
+![upload_jobtask_error_solve](../../images/upload_jobtask_error_solve.png)
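+
+A hedged sketch of that nginx change (the 200m limit is an assumed value; choose one larger than your zip packages):
+
+```bash
+# Append the upload size limit after the server_name line of streamis.conf,
+# then validate the config and reload nginx
+sudo sed -i '/server_name/a client_max_body_size 200m;' /etc/nginx/conf.d/streamis.conf
+sudo nginx -t && sudo nginx -s reload
+```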
+
+Import the zip package in Streamis. After the job is imported, its run state becomes "Not started", the new job version starts from v00001, and the latest release time is updated to the current time.
+
+## 5. Project resource files
+
+Streamis home page - upper right of the core metrics - "Project resource files".
+Project resource files let you upload and manage the resource files a project needs, as shown below:
+
+![project_source_file_list](../../images/project_source_file_list.png)
+
+Upload a project file
+
+![project_source_file_import](../../images/project_source_file_import.png)
+
+## 6. Streamis jobs in detail
+
+Click the job name to view the job's details, including run status, execution history, configuration, job details, alerts, and so on.
+
+### 6.1 Run status
+
+![stream_job_detail](../../images/stream_job_detail.png)
+
+### 6.2 Execution history
+
+Open the execution history to see the job's past runs.
+
+History logs: only running jobs have viewable history logs.
+
+![stream_job_history](../../images/stream_job_history.png)
+
+In the history logs you can view the logs of the Flink engine started for this job as well as its Yarn logs, filter key entries by keyword, and click "view latest logs" to see the engine's most recent output.
+
+![job_log1](../../images/job_log1.png)
+
+![job_log2](../../images/job_log2.png)
+
+### 6.3 Configuration
+
+Configure Flink resource parameters and checkpoint parameters for the Streamis job
+
+![image-20211231101503678](../../images/stream_job_config_1.png)
+![image-20211231101503678](../../images/stream_job_config_2.png)
+
+### 6.4 Job details
+
+The job details view has two layouts depending on the job type: Flink Jar and Flink SQL.
+
+- **Flink Jar job details**
+
+![Job details](../../images/stream_job_flinkjar_jobcontent.png)
+
+  The Flink Jar job details show the contents and parameters of the job's jar package and also provide a download of that jar.
+
+- **Flink SQL job details**
+
+![Job details](../../images/stream_job_flinksql_jobcontent.png)
+
+  The Flink SQL job details show the job's SQL statements.
+
+### 6.5 Opening the Yarn page
+
+For a running Streamis job, this button opens the Yarn management UI to check how the Flink job is running.
+
+![image-20211231102020703](../../images/image-20211231102020703.png)
+
+## 7. Streamis job lifecycle management
+
+### 7.1. Starting a job
+
+A successfully imported job starts in the "Not started" state. Click start; once startup finishes, the job's state on the page is refreshed to "Running"
+
+![job_start1](../../images/job_start1.png)
+
+![job_start2](../../images/job_start2.png)
+
+Job startup runs a pre-flight inspection of the job's version and snapshot information. If anything needs user confirmation, a dialog pops up; for batch restarts you can tick "Confirm all batch jobs" to confirm everything at once
+
+![inspect1](../../images/inspect1.png)
+
+### 7.2. Stopping a job
+
+Click "Stop" and choose either "Stop directly" or "Snapshot and stop"; "Snapshot and stop" generates the snapshot information and displays it
+
+![job_stop1](../../images/job_stop1.png)
+
+### 7.3. Saving a snapshot
+
+Click the job name, its configuration, or one of the entries behind the three vertical dots on the left (parameter configuration / alert configuration / run history / run logs) to open the job details; click Start to run the job
+
+Click "Snapshot [savepoint]" behind the three vertical dots on the left of a job to save a snapshot
+
+![savepoint1](../../images/savepoint1.png)
+
+![savepoint2](../../images/savepoint2.png)
+
+### 7.4. Batch operations
+
+Click "Batch operations" to select several jobs and restart them. "Snapshot restart" takes a snapshot first and then restarts; "Direct restart" does not take a snapshot
+
+![jobbulk_operate](../../images/jobbulk_operate.png)
+
diff --git "a/docs/zh_CN/0.2.0/AppConn\347\232\204\344\275\277\347\224\250.md" "b/docs/zh_CN/\345\256\211\350\243\205\346\226\207\346\241\243/AppConn\347\232\204\344\275\277\347\224\250.md"
similarity index 98%
rename from "docs/zh_CN/0.2.0/AppConn\347\232\204\344\275\277\347\224\250.md"
rename to "docs/zh_CN/\345\256\211\350\243\205\346\226\207\346\241\243/AppConn\347\232\204\344\275\277\347\224\250.md"
index 588eb4c1a..019df1d05 100644
--- "a/docs/zh_CN/0.2.0/AppConn\347\232\204\344\275\277\347\224\250.md"
+++ "b/docs/zh_CN/\345\256\211\350\243\205\346\226\207\346\241\243/AppConn\347\232\204\344\275\277\347\224\250.md"
@@ -12,8 +12,6 @@ StreamisAppConn is the AppConn Streamis uses to integrate with DSS; its functions include
| Level-3 spec - CRUDService | Supports creating, fetching, updating, and deleting stream orchestrations | DSS-Framework-Orchestrator-Server |
| Level-3 spec - ExportService and ImportService | Supports importing and exporting stream orchestrations | DSS-Framework-Orchestrator-Server |
-
-
### 1.2 Deployment
1. Compile
@@ -57,9 +55,8 @@ delete from `dss_onestop_menu_application` WHERE title_en = 'Streamis';
-### 1.3 Usage
-
## 2.Streamis DataSource AppConn
+
----------
### 2.1 Introduction
| Implemented specs and Services | Function | Target microservice |
@@ -71,9 +68,8 @@ delete from `dss_onestop_menu_application` WHERE title_en = 'Streamis';
1. Implements the level-3 CRUDService, supporting creation, fetching, updating, and deletion of data source nodes
2. Implements the level-3 ExportService and ImportService, supporting import and export of data sources
3. Implements the level-3 ExecutionService, supporting execution of data sources
-### 2.2 Deployment
-### 2.3 Usage
+
## 3.Streamis JobManager AppConn
@@ -86,5 +82,4 @@ The StreamisJobManager AppConn is similar in function to the SchedulisAppConn; it mainly
| Workflow conversion spec | Supports converting stream workflows into streaming applications that the Linkis Flink engine can execute | DSS-Framework-Orchestrator-Server |
| Workflow publication spec | Supports publishing the converted streaming applications to Streamis-JobManager | DSS-Framework-Orchestrator-Server |
-### 3.2 Deployment
diff --git "a/docs/zh_CN/0.2.0/Streamis\345\256\211\350\243\205\346\226\207\346\241\243.md" "b/docs/zh_CN/\345\256\211\350\243\205\346\226\207\346\241\243/Streamis\345\256\211\350\243\205\346\226\207\346\241\243.md"
similarity index 74%
rename from "docs/zh_CN/0.2.0/Streamis\345\256\211\350\243\205\346\226\207\346\241\243.md"
rename to "docs/zh_CN/\345\256\211\350\243\205\346\226\207\346\241\243/Streamis\345\256\211\350\243\205\346\226\207\346\241\243.md"
index 0e1d7d8f6..1b66f4dc7 100644
--- "a/docs/zh_CN/0.2.0/Streamis\345\256\211\350\243\205\346\226\207\346\241\243.md"
+++ "b/docs/zh_CN/\345\256\211\350\243\205\346\226\207\346\241\243/Streamis\345\256\211\350\243\205\346\226\207\346\241\243.md"
@@ -1,17 +1,7 @@
# Streamis installation and deployment guide
-## 1. Component overview
+## 1. Compiling the code
-Streamis 0.2.0 provides the Streamis-JobManager stream production center, whose main functions are:
-
-1. Uploading/updating streaming applications
-2. Configuring streaming application parameters, such as the number of Flink slots and checkpoint-related parameters
-3. Managing streaming applications, e.g. start/stop and savepoints
-4. Monitoring and alerting for streaming applications
-
-
-## 2. Compiling the code
-
**Streamis does not need to be compiled manually; you can deploy directly from the release package. Please [download the release package here](https://github.com/WeBankFinTech/Streamis/releases).**
If you want to compile Streamis yourself, follow the steps below.
@@ -36,26 +26,26 @@ npm run build
```
After a successful build, `streamis-${streamis-version}-dist.zip` is generated under `${STREAMIS_CODE_HOME}/web`
-## 3. Installation preparation
+## 2. Installation preparation
-#### 3.1 Basic environment installation
+#### 2.1 Basic environment installation
The following software must be installed:
- MySQL (5.5+), [how to install MySQL](https://www.runoob.com/mysql/mysql-install.html)
- JDK (1.8.0_141 or later), [how to install JDK](https://www.runoob.com/java/java-environment-setup.html)
-### 3.2 Linkis and DSS environment
+#### 2.2 Linkis and DSS environment
-- Linkis (>=1.1.1). Streamis execution relies on the Linkis Flink engine and requires **Linkis-1.1.1** or later; some features need Linkis-1.1.2.
-- DataSphere Studio (>=1.1.0). Developing and debugging Streamis stream jobs relies on DSS-Scriptis, and the Streamis stream production center must be embedded in the DSS project framework, so **DSS-1.1.0** or later is required.
+- Linkis (>=1.4.0). Streamis execution relies on the Linkis Flink engine and requires **Linkis-1.4.0** or later.
+- DataSphere Studio (>=1.1.2). Developing and debugging Streamis stream jobs relies on DSS-Scriptis, and the Streamis stream production center must be embedded in the DSS project framework, so **DSS-1.1.2** or later is required.
-Before formally installing Streamis, please install Linkis 1.1.1 and DSS 1.1.0 or later, and make sure the Linkis Flink engine and DSS work properly. For installing DSS and Linkis, see the [DSS & Linkis one-click installation guide](https://github.com/WeBankFinTech/DataSphereStudio-Doc/blob/main/zh_CN/%E5%AE%89%E8%A3%85%E9%83%A8%E7%BD%B2/DSS%E5%8D%95%E6%9C%BA%E9%83%A8%E7%BD%B2%E6%96%87%E6%A1%A3.md).
+Before formally installing Streamis, please install Linkis 1.4.0 and DSS 1.1.2 or later, and make sure the Linkis Flink engine and DSS work properly. For installing DSS and Linkis, see the [DSS & Linkis one-click installation guide](https://github.com/WeBankFinTech/DataSphereStudio-Doc/blob/main/zh_CN/%E5%AE%89%E8%A3%85%E9%83%A8%E7%BD%B2/DSS%E5%8D%95%E6%9C%BA%E9%83%A8%E7%BD%B2%E6%96%87%E6%A1%A3.md).
How to verify that DSS and Linkis are basically usable? Create and run a flinksql script in DSS-Scriptis; if it executes correctly and returns a result set, the DSS and Linkis environment is usable.
-## 4. Installation and startup
+## 3. Installation and startup
### Backend installation
@@ -70,16 +60,23 @@ tar -xvf wedatasphere-streamis-${streamis-version}-dist.tar.gz
2. Modify the database configuration
-```shell script
+```shell script
vi conf/db.sh
-# Basic database configuration
+```
+```shell script
+# Basic database configuration
+MYSQL_HOST=${MYSQL_HOST}
+MYSQL_PORT=${MYSQL_PORT}
+MYSQL_DB=${MYSQL_DB}
+MYSQL_USER=${MYSQL_USER}
+MYSQL_PASSWORD=${MYSQL_PASSWORD}
```
3. Modify the basic configuration file
```shell script
- vi conf/config.sh
+vi conf/config.sh
```
```shell script
@@ -89,6 +86,9 @@ deployUser=hadoop
### ssh port
SSH_PORT=22
+##The Max Heap size for the JVM
+SERVER_HEAP_SIZE="512M"
+
##The Port of Streamis
STREAMIS_PORT=9400
@@ -103,6 +103,20 @@ EUREKA_PORT=20303
GATEWAY_INSTALL_IP=127.0.0.1
GATEWAY_PORT=9001
+################### The install Configuration of all Micro-Services #####################
+#
+# NOTICE:
+# 1. If you just wanna try, the following micro-service configuration can be set without any settings.
+# These services will be installed by default on this machine.
+# 2. In order to get the most complete enterprise-level features, we strongly recommend that you
+#    explicitly configure the following micro-service parameters
+#
+
+STREAMIS_SERVER_INSTALL_IP=127.0.0.1
+STREAMIS_SERVER_INSTALL_PORT=9400
+
+STREAMIS_VERSION=0.3.0
+STREAMIS_FILE_NAME="STREAMIS-$STREAMIS_VERSION"
```
4. Run the installation script
@@ -134,7 +148,7 @@ sh bin/start.sh
### Front-end deployment
1. Install nginx
-
+
```bash
sudo yum install -y nginx
```
@@ -195,12 +209,12 @@ server {
sudo nginx -s reload
```
-## 5. Integrating with DSS
+## 4. Integrating with DSS
-To use the Streamis 0.2.0 front end properly, you also need to install the DSS StreamisAppConn plugin; see the [StreamisAppConn plugin installation guide](development/StreamisAppConn安装文档.md)
+To use the Streamis 0.3.0 front end properly, you also need to install the DSS StreamisAppConn plugin; see the [StreamisAppConn plugin installation guide](../development/StreamisAppConn安装文档.md)
-## 6. Compiling and installing the Linkis Flink engine
-To run Streamis 0.2.0 properly, you also need to install the Linkis Flink engine; see the [Linkis Flink engine installation guide](https://linkis.apache.org/zh-CN/docs/1.1.2/engine_usage/flink/)
+## 5. Compiling and installing the Linkis Flink engine
+To run Streamis 0.3.0 properly, you also need to install the Linkis Flink engine; see the [Linkis Flink engine installation guide](https://linkis.apache.org/zh-CN/docs/1.1.2/engine_usage/flink/)
-## 6. Streamis component upgrade guide/scripts
-To upgrade from an earlier Streamis version to Streamis 0.2.0, see the [Streamis upgrade guide](development/Streamis升级文档.md)
\ No newline at end of file
+## 6. Streamis component upgrade guide/scripts
+To upgrade from an earlier Streamis version to Streamis 0.3.0, see the [Streamis upgrade guide](../版本升级文档/0.3.0/Streamis升级文档.md)
\ No newline at end of file
diff --git "a/docs/zh_CN/\347\211\210\346\234\254\345\215\207\347\272\247\346\226\207\346\241\243/0.3.0/Streamis\345\215\207\347\272\247\346\226\207\346\241\243.md" "b/docs/zh_CN/\347\211\210\346\234\254\345\215\207\347\272\247\346\226\207\346\241\243/0.3.0/Streamis\345\215\207\347\272\247\346\226\207\346\241\243.md"
new file mode 100644
index 000000000..39033296e
--- /dev/null
+++ "b/docs/zh_CN/\347\211\210\346\234\254\345\215\207\347\272\247\346\226\207\346\241\243/0.3.0/Streamis\345\215\207\347\272\247\346\226\207\346\241\243.md"
@@ -0,0 +1,97 @@
+# Configuration and script upgrade guide
+
+## Configuration changes
+
+None for this release
+
+## SQL changes
+
+This release changes the structure of three tables and the data of two tables; the changes are on the project's release branch and are included when packaging
+
+![](../../../images/0.3.0/upgrade/upgrade-to-0.3.0.png)
+
+### Base script changes
+
+#### 1. streamis_ddl.sql
+```sql
+# Add a current_version column to the linkis_stream_job table
+ CREATE TABLE `linkis_stream_job` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `project_name` varchar(100) DEFAULT NULL,
+ `name` varchar(200) DEFAULT NULL,
+    `status` tinyint(1) DEFAULT '0' COMMENT '1: completed, 2: waiting to restart, 3: alert, 4: slow job, 5: running, 6: failed',
+ `create_by` varchar(50) DEFAULT NULL,
+ `create_time` datetime DEFAULT NULL,
+ `label` varchar(200) DEFAULT NULL,
+ `description` varchar(200) DEFAULT NULL,
+    `job_type` varchar(30) DEFAULT NULL COMMENT 'currently only flink.sql, flink.jar and spark.jar are supported',
+ `submit_user` varchar(100) DEFAULT NULL,
+ `workspace_name` varchar(50) DEFAULT NULL,
+ `current_version` varchar(50) DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE,
+ UNIQUE KEY(`project_name`, `name`)
+) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='job table';
+
+# Add a manage_mode column to the linkis_stream_job_version table
+CREATE TABLE `linkis_stream_job_version` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `job_id` varchar(50) DEFAULT NULL,
+ `version` varchar(20) DEFAULT NULL,
+    `source` varchar(255) DEFAULT NULL COMMENT 'origin of this version, e.g. uploaded by a user or rolled back from a historical version',
+    `job_content` text COMMENT 'the content is meta.json',
+ `manage_mode` varchar(30) DEFAULT 'EngineConn' COMMENT 'Manage mode',
+ `comment` varchar(255) DEFAULT NULL,
+ `create_time` datetime DEFAULT NULL,
+ `create_by` varchar(32) DEFAULT NULL,
+ PRIMARY KEY (`id`) USING BTREE,
+ UNIQUE KEY(`job_id`, `version`)
+) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='job version table';
+
+
+
+# Add two columns, update_time and md5, to the linkis_stream_project_files table
+ CREATE TABLE `linkis_stream_project_files` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `file_name` varchar(500) DEFAULT NULL,
+    `version` varchar(30) DEFAULT NULL COMMENT 'file version number, specified by the user at upload time',
+    `store_path` varchar(100) DEFAULT NULL COMMENT 'e.g. {"resource":"22edar22", "version": "v0001"}',
+    `store_type` varchar(20) DEFAULT NULL COMMENT 'storage type, usually bml',
+ `project_name` varchar(50) DEFAULT NULL,
+ `create_time` datetime DEFAULT CURRENT_TIMESTAMP,
+ `create_by` varchar(32) DEFAULT NULL,
+    `comment` varchar(255) DEFAULT NULL COMMENT 'remarks',
+ `update_time` datetime DEFAULT NULL,
+    `md5` varchar(100) DEFAULT NULL COMMENT 'file md5',
+ PRIMARY KEY (`id`) USING BTREE
+ ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8 ROW_FORMAT=COMPACT COMMENT='project table';
+
+```
+
+#### 2. streamis_dml.sql
+
+None for this release
+
+### Upgrade scripts
+
+#### 1. DDL upgrade script
+
+```sql
+alter table linkis_stream_job add column `current_version` varchar(50);
+alter table linkis_stream_job_version add column `manage_mode` varchar(30) default 'EngineConn';
+ALTER TABLE linkis_stream_project_files ADD update_time datetime NULL;
+ALTER TABLE linkis_stream_project_files ADD md5 varchar(100) NULL;
+```
+
+#### 2. DML upgrade script
+
+```sql
+UPDATE linkis_stream_job_config_def
+SET `key`='wds.linkis.flink.alert.failure.user', name='失败时告警用户', `type`='INPUT', sort=0, description='失败时告警用户', validate_type='None', validate_rule=NULL, `style`='', visiable=1, `level`=1, unit=NULL, default_value='', ref_values='', parent_ref=8, required=0, is_temp=0
+WHERE id=16;
+
+INSERT INTO `linkis_stream_job_config_def` VALUES (38, 'linkis.ec.app.manage.mode', '管理模式', 'SELECT', 3, 'EngineConn管理模式', 'None', NULL, '', 1, 1, NULL, 'attach', 'detach,attach', 8, 0, 0);
+
+update linkis_stream_job j set current_version = (select version from linkis_stream_job_version v where v.job_id = j.id order by id desc limit 1);
+
+```
+
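+A minimal sketch of applying the upgrade with the mysql client (the ${MYSQL_*} variables match conf/db.sh from the installation guide; the ddl/dml file names are illustrative):
+
+```bash
+# Back up the streamis database first, then apply the DDL before the DML
+mysqldump -h${MYSQL_HOST} -P${MYSQL_PORT} -u${MYSQL_USER} -p${MYSQL_PASSWORD} ${MYSQL_DB} > streamis_backup.sql
+mysql -h${MYSQL_HOST} -P${MYSQL_PORT} -u${MYSQL_USER} -p${MYSQL_PASSWORD} ${MYSQL_DB} < upgrade_ddl.sql
+mysql -h${MYSQL_HOST} -P${MYSQL_PORT} -u${MYSQL_USER} -p${MYSQL_PASSWORD} ${MYSQL_DB} < upgrade_dml.sql
+```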
diff --git "a/docs/zh_CN/\347\211\210\346\234\254\345\215\207\347\272\247\346\226\207\346\241\243/0.3.0/\346\226\260\347\211\210\346\234\254\345\212\237\350\203\275\347\211\271\346\200\247\346\226\207\346\241\243.md" "b/docs/zh_CN/\347\211\210\346\234\254\345\215\207\347\272\247\346\226\207\346\241\243/0.3.0/\346\226\260\347\211\210\346\234\254\345\212\237\350\203\275\347\211\271\346\200\247\346\226\207\346\241\243.md"
new file mode 100644
index 000000000..256a9c007
--- /dev/null
+++ "b/docs/zh_CN/\347\211\210\346\234\254\345\215\207\347\272\247\346\226\207\346\241\243/0.3.0/\346\226\260\347\211\210\346\234\254\345\212\237\350\203\275\347\211\271\346\200\247\346\226\207\346\241\243.md"
@@ -0,0 +1,223 @@
+# New release feature notes
+
+## 1. [Stream job version management] Streamis supports online import and version management of running applications during initialization
+
+On the stream job list page, the versionForwards field of the returned job details is now read; it means "the number of published versions ahead of the current one". If the value is greater than 0, it is displayed as (+{count}) at the upper right of the version info on the page, as follows:
+
+![upgrade1](../../../images/upgrade1.png)
+
+Note that versionForwards is normally greater than zero only for jobs in a running state: publishing against a running job does not update the job's in-use version, so the published versions can get ahead of the in-use version.
+
+Clicking the version value calls the job's version list interface, which is paginated. The returned data is rendered as a table; the front end adds a "version status" column with the default value "---". While rendering the version list, each row's version field is compared with the job's in-use version, and if they match the version status is updated to "In use" (as in the figure above).
+
+## 2. [Stream job version management] Startup inspection added when starting Streamis jobs
+
+There are currently two ways to start a job from the front end:
+
+1) Click the start button on the right of the job list, as shown below:
+
+![upgrade2](../../../images/upgrade2.png)
+
+2) Use the two batch-operation buttons, "Snapshot restart" and "Direct restart", as shown below:
+![upgrade3](../../../images/upgrade3.png)
+
+The logic here is:
+
+1. Snapshot restart and direct restart differ only in the snapshot parameter passed when calling the interface
+
+2. Snapshot restart, direct restart, and start all need to call the inspect interface
+
+3. Before calling the inspect interface, snapshot restart and direct restart require the pause interface to return success
+
+4. Start does not need to call the pause interface
+
+5. For snapshot restart and direct restart, the bulk/execution interface is called once "Confirm all batch jobs" has been ticked and confirmed, or every step has been confirmed through to the last one (if a step returns inspections as [], that step needs no confirmation and is skipped)
+
+6. For start, if the inspect interface returns inspections as [], the execute interface is called directly
+
+Currently the startup inspection only checks the version and the snapshot. The version check takes the latest version (now) and the last started version (last) from the version key-value pair and displays: **Start job {job.name} with the latest published version {now.version}? (last started version: {last.version})**. The snapshot check extracts the path from the snapshot key-value pair and displays: **Confirm restoring job {job.name} from the following snapshot: {path}**. If the interface returns an error, "Job startup inspection failed, {message}" is thrown directly; and if the inspections list is empty, no dialog is shown.
+
+Finally, whether the job is started depends on the user's choice.
+
+![upgrade3](../../../images/upgrade4.png)
+
+For batch restarts, the startup inspection interface has to be called once for each job before the batch starts, each with its own confirmation dialog; that inevitably means a flood of dialogs. So for batch scenarios a "Confirm all batch jobs" checkbox is added below the dialog content: once the user ticks it, the choice is recorded, and the remaining jobs in the batch skip the startup inspection and reuse that choice.
+
+![upgrade3](../../../images/upgrade5.png)
+
+![upgrade3](../../../images/upgrade6.png)
+
+![upgrade3](../../../images/upgrade7.png)
+
+![upgrade3](../../../images/upgrade8.png)
+
+## 3. [Stream job configuration management] Importing Streamis application parameter configuration
+
+[Background] Streamis application parameters currently have to be entered by hand, which is inconvenient for batch configuration.
+
+[Requirements] 1. Provide an import function that loads application parameters quickly as a JSON package, to interface with AOMP release templates so that application parameters can be published and configured through AOMP. 2. Provide a template-reference function for parameter configuration: apart from startup resources, most applications share largely the same parameters, so another application's configuration can be referenced for quick setup.
+
+[Design] Previously Streamis split the import of a job application and the setting of its configuration items into two interfaces: 1) job import: /streamis/streamJobManager/job/upload POST multipart-form; 2) job configuration: /streamis/streamJobManager/config/json/{jobId} POST application-json. Job import is implemented as a file upload; the file must be a .zip archive containing the job definition file meta.json, whose basic format is:
+
+```yaml
+{
+    "projectName": "",              # project name
+    "jobName": "",                  # job name
+    "jobType": "flink.sql",         # currently only flink.sql and flink.jar are supported
+    "tags": "",                     # application tags
+    "description": "",              # job description
+    "jobContent": {
+        # varies with jobType; see below for details
+    }
+}
+```
+
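+For illustration, a hedged curl sketch of the job import call (the gateway address, the /api/rest_j/v1 prefix, the form field name "file", and the cookie handling are assumptions based on common Linkis gateway conventions, not confirmed by this document):
+
+```bash
+# Upload a packaged job zip as a multipart form through the gateway,
+# reusing the login cookies saved in cookies.txt
+curl -b cookies.txt -F "file=@streamisjobtest.zip" \
+  "http://${GATEWAY_IP}:${GATEWAY_PORT}/api/rest_j/v1/streamis/streamJobManager/job/upload"
+```
+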
+Now the job configuration parameters are merged into the definition file meta.json; the merged format is as follows:
+
+```yaml
+{
+ "projectName": "", # 项目名
+ "jobName": "", # 作业名
+ "jobType": "flink.sql", # 目前只支持flink.sql、flink.jar
+ "tags": "", # 应用标签
+ "description": "" # 作业描述,
+ "jobContent": {
+ # 不同的jobType,其内容各不相同,具体请往下看
+ },
+ "jobConfig": {
+ "wds.linkis.flink.resource": {
+ "wds.linkis.flink.app.parallelism":"4",
+ "wds.linkis.flink.jobmanager.memory":"2048",
+ "wds.linkis.flink.taskmanager.memory":"4096",
+ "wds.linkis.flink.taskmanager.numberOfTaskSlots":"2",
+ "wds.linkis.flink.taskmanager.cpus":"2",
+ "wds.linkis.rm.yarnqueue":""
+ },
+ "wds.linkis.flink.custom": {
+ "stream.log.filter.keywords":"ERROR,WARN,INFO",
+ "security.kerberos.krb5-conf.path":"",
+ "demo.log.tps.rate":"20000",
+ "classloader.resolve-order":"parent-first",
+ "stream.log.debug":"true",
+ "security.kerberos.login.contexts":"KafkaClient,Client",
+ "security.kerberos.login.keytab":"",
+ "security.kerberos.login.principal":""
+ },
+ "wds.linkis.flink.produce": {
+ "wds.linkis.flink.checkpoint.switch":"OFF",
+ "wds.linkis.flink.app.fail-restart.switch":"OFF",
+ "wds.linkis.flink.app.start-auto-restore.switch":"OFF"
+ }
+ }
+}
+```
+
+During job creation the backend opens a transaction: it first inserts the job and then calls the job-parameter creation service; if both succeed the transaction commits, otherwise all inserts are rolled back. **Note: default parameters are filled in automatically, and undefined parameters are simply filtered out; custom Flink parameters, for example, are not validated.** The default parameters are:
+
+```yaml
+Resource configuration (defaults):
+wds.linkis.flink.app.parallelism: "4"
+wds.linkis.flink.jobmanager.memory: "1024"
+wds.linkis.flink.taskmanager.cpus: "2"
+wds.linkis.flink.taskmanager.memory: "4096"
+wds.linkis.flink.taskmanager.numberOfTaskSlots: "2"
+wds.linkis.rm.yarnqueue: ""
+Flink parameters (custom parameters):
+(no configuration items)
+Production configuration (defaults):
+wds.linkis.flink.app.fail-restart.switch: "OFF"
+wds.linkis.flink.app.start-auto-restore.switch: "OFF"
+wds.linkis.flink.checkpoint.switch: "OFF"
+wds.linkis.flink.savepoint.path: ""
+Permission settings:
+wds.linkis.flink.authority.visible: ""
+```
+
+## 4. [Driver management] Optimizing the checkpoint self-recovery strategy
+
+**Background:** While self-recovering a job from its checkpoints (checkpoint/savepoint) we hit a problem: when a job restarts and recovers several times in a row, each restart can only obtain the checkpoint of the previous run. If a restarted job exits abnormally before it creates a checkpoint, later restarts can no longer obtain any checkpoint information, and data is lost.
+
+**Design:** To solve this, the Streamis checkpoint self-recovery strategy is adjusted: a successfully started job must not discard the checkpoint information it used, but pass it on to the next run. To pass checkpoint information between job runs, the design is as follows:
+
+1) Each time a job starts, the selected checkpoint (checkpoint/savepoint) information is stored in the job's jobInfo object; each time the job launches successfully, the jobInfo object is persisted.
+
+2) When a job starts with recovery, checkpoint data is looked up in three places: the map parsed back from the job's jobInfo field, the job's checkpoint path, and the job's savepoint path. The checkpoint data from all three places is sorted together, and the entry with the latest creation/modification time is chosen.
+
+![upgrade3](../../../images/update_version1.png)
+
+![upgrade3](../../../images/update_version2.png)
+
+As shown above, when the job started at 16:54 it used the latest savepoint file.
+
+3) If the user configured a snapshot path, or confirmed one in the dialog, that path has the highest priority
+
+![upgrade3](../../../images/update_version3.png)
+
+![upgrade3](../../../images/update_version4.png)
+
+![upgrade3](../../../images/update_version5.png)
+
+
+
+### 5. Detached-mode refactoring
+In 0.3.0 the detached Flink engine feature is refactored and detached submission is supported. After submission, once the Flink application has been created on Yarn, the Linkis Flink engine exits automatically and releases its JVM resources, while Streamis keeps managing the lifecycle of the Flink application on Yarn.
+
+![1.1](../../../images/版本功能介绍/1.1.png)
+
+From 0.3.0 on, jobs are non-detached by default; there are two ways to switch a job to detached mode
+###### ① At import time, add the configuration parameter linkis.ec.app.manage.mode to the production parameters; the value detach means detached, attach means non-detached (the default)
+```
+"wds.linkis.flink.produce": {
+ "wds.linkis.flink.checkpoint.switch":"ON",
+ "wds.linkis.flink.alert.failure.user":"",
+ "wds.linkis.flink.app.fail-restart.switch":"OFF",
+ "wds.linkis.flink.app.start-auto-restore.switch":"OFF",
+ "linkis.ec.app.manage.mode": "detach"
+ }
+```
+
+###### ② On the job configuration page, change the manage mode to detach
+![1.2](../../../images/版本功能介绍/1.2.png)
+
+
+
+
+### 6. Tag filtering
+Jobs can now be filtered by tag
+
+![2.1](../../../images/版本功能介绍/2.1.png)
+
+### 7. Batch tag editing
+
+Click batch edit first, select multiple jobs, then click Edit tags. Enter the new tag content in the dialog; upper- and lower-case letters, digits, commas, and underscores are supported.
+
+![3.1](../../../images/版本功能介绍/3.1.png)
+![3.2](../../../images/版本功能介绍/3.2.png)
+
+
+### 8. Automatic cleanup of uploaded job zip packages
+After a job is imported, the zip cache on the server side is cleaned up automatically
+
+### 9. Pre-start check of the failure alert user
+
+The failure alert user must be a non-hduser account; otherwise the job cannot start.
+The alert user can be configured at import time or on the job configuration page:
+###### ① At import time, set the alert user wds.linkis.flink.alert.failure.user in the production configuration
+```
+"wds.linkis.flink.produce": {
+ "wds.linkis.flink.checkpoint.switch":"ON",
+ "wds.linkis.flink.alert.failure.user":"",
+ "wds.linkis.flink.app.fail-restart.switch":"OFF",
+ "wds.linkis.flink.app.start-auto-restore.switch":"OFF",
+ "linkis.ec.app.manage.mode": "detach"
+ }
+```
+
+### 10. md5 added to uploaded project resource files
+On the project resource management page, each uploaded file now shows an update time and its md5 value; the md5 matches what the Linux command line produces
+
+![6.1](../../../images/版本功能介绍/6.1.png)
+![6.2](../../../images/版本功能介绍/6.2.png)
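+
+For example, the page value can be checked against the command line (`resource.txt` is the sample file name from section 4.2 of the user manual):
+
+```bash
+# Compute the md5 locally; it should match the value shown in Streamis
+md5sum resource.txt
+```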
+
+### 11. When a job is started, Yarn is automatically checked for an application with the same name
+Click the application name to jump to the Yarn page
+![7.1](../../../images/版本功能介绍/7.1.png)
+
diff --git a/pom.xml b/pom.xml
index eabaaba66..defe1578c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -21,7 +21,7 @@
    <groupId>com.webank.wedatasphere.streamis</groupId>
    <artifactId>streamis</artifactId>
-    <version>0.2.0</version>
+    <version>0.3.0</version>
    <packaging>pom</packaging>
    <name>Streamis Project Parent POM</name>
@@ -45,14 +45,16 @@
- 1.1.1
- 1.1.0
- 0.2.0
- 2.11.12
+ 1.4.0
+ 4.12
+ 1.1.2
+ 0.3.0
+ 2.12.17
1.8
3.3.3
- 2.8.5
- 2.11.3
+ 2.10.1
+ 1.31
+ 2.13.2
3.1.1
4.5.4
4.5.4
@@ -68,10 +70,10 @@
0.9.10
2.21
1.9.5
- 1.4.19
- 0.2.0
+ 0.3.0
5.1.47
2.0.1.Final
+ 1.4.20
@@ -134,7 +136,11 @@
linkis-common
${linkis.version}
-
+        <dependency>
+            <groupId>org.yaml</groupId>
+            <artifactId>snakeyaml</artifactId>
+            <version>${snakeyaml.version}</version>
+        </dependency>
org.apache.linkis
linkis-protocol
@@ -177,7 +183,7 @@
org.apache.maven.plugins
maven-source-plugin
-                <version>3.1.0</version>
+                <version>3.2.1</version>
true
@@ -195,12 +201,12 @@
org.apache.maven.plugins
maven-deploy-plugin
-                <version>3.0.0-M1</version>
+                <version>3.0.0</version>
org.apache.maven.plugins
maven-gpg-plugin
-                <version>1.5</version>
+                <version>3.0.1</version>
sign-artifacts
@@ -222,7 +228,7 @@
org.apache.maven.plugins
maven-deploy-plugin
-                <version>2.8.2</version>
+                <version>3.0.0</version>
org.apache.maven.plugins
@@ -314,9 +320,4 @@
-
-
-
-
-
\ No newline at end of file
diff --git a/streamis-appconn/pom.xml b/streamis-appconn/pom.xml
index 54b590f91..40fd6e52f 100644
--- a/streamis-appconn/pom.xml
+++ b/streamis-appconn/pom.xml
@@ -5,7 +5,7 @@
        <artifactId>streamis</artifactId>
        <groupId>com.webank.wedatasphere.streamis</groupId>
-        <version>0.2.0</version>
+        <version>0.3.0</version>
4.0.0
diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/constraints/Constraints.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/constraints/Constraints.java
index 46507753f..0f7abbb0d 100644
--- a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/constraints/Constraints.java
+++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/constraints/Constraints.java
@@ -7,6 +7,9 @@
*/
public class Constraints {
+ private Constraints(){}
+
+
// AppConn name
public static final String STREAMIS_APPCONN_NAME = CommonVars.apply("wds.dss.appconn.streamis.name", "Streamis").getValue();
diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectCreationOperation.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectCreationOperation.java
index e8d000c2b..026700bce 100644
--- a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectCreationOperation.java
+++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/project/StreamisProjectCreationOperation.java
@@ -35,7 +35,6 @@ public ProjectResponseRef createProject(DSSProjectContentRequestRef.DSSProjectCo
Workspace workspace = dssProjectContentRequestRef.getWorkspace();
DSSProjectPrivilege dssProjectPrivilege = dssProjectContentRequestRef.getDSSProjectPrivilege();
if(dssProject == null || dssProjectPrivilege == null){
- //TODO error code need to amend
throw new StreamisAppConnErrorException(-1, "the dssProject or dssProjectPrivilege is null");
}
streamisPostAction.addRequestPayload("projectName",dssProject.getName());
diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisProjectContentReqRef.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisProjectContentReqRef.java
index 3f3421948..a26e04b70 100644
--- a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisProjectContentReqRef.java
+++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisProjectContentReqRef.java
@@ -8,6 +8,6 @@
public class StreamisProjectContentReqRef extends StreamisStructureReqRef
implements RefProjectContentRequestRef {
public StreamisProjectContentReqRef(){
-
+        // Default constructor: nothing to initialize
}
}
diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisProjectUpdateReqRef.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisProjectUpdateReqRef.java
index 3bd209dc2..9acaab75c 100644
--- a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisProjectUpdateReqRef.java
+++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/structure/ref/StreamisProjectUpdateReqRef.java
@@ -7,6 +7,6 @@
*/
public class StreamisProjectUpdateReqRef extends StreamisStructureReqRef implements ProjectUpdateRequestRef {
public StreamisProjectUpdateReqRef(){
-
+        // Default constructor: nothing to initialize
}
}
diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/utils/NumberUtils.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/utils/NumberUtils.java
index db22cf871..527b6e405 100644
--- a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/utils/NumberUtils.java
+++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/utils/NumberUtils.java
@@ -2,6 +2,8 @@
public class NumberUtils {
+ private NumberUtils() {}
+
public static Integer getInt(Object original) {
if (original instanceof Double) {
return ((Double) original).intValue();
diff --git a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/utils/StreamisCommonUtil.java b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/utils/StreamisCommonUtil.java
index 2db1d396e..91102032f 100644
--- a/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/utils/StreamisCommonUtil.java
+++ b/streamis-appconn/src/main/java/com/webank/wedatasphere/streamis/dss/appconn/utils/StreamisCommonUtil.java
@@ -18,7 +18,9 @@
public class StreamisCommonUtil {
- private final static Logger logger = LoggerFactory.getLogger(StreamisCommonUtil.class);
+ private StreamisCommonUtil() {}
+
+ private static final Logger logger = LoggerFactory.getLogger(StreamisCommonUtil.class);
public static SSOUrlBuilderOperation getSSOUrlBuilderOperation(WorkspaceRequestRef requestRef, String url) {
SSOUrlBuilderOperation ssoUrlBuilderOperation = SSOHelper.createSSOUrlBuilderOperation(requestRef.getWorkspace());
diff --git a/streamis-jobmanager/pom.xml b/streamis-jobmanager/pom.xml
index b1a644e09..58a9cda15 100644
--- a/streamis-jobmanager/pom.xml
+++ b/streamis-jobmanager/pom.xml
@@ -20,7 +20,7 @@
        <artifactId>streamis</artifactId>
        <groupId>com.webank.wedatasphere.streamis</groupId>
-        <version>0.2.0</version>
+        <version>0.3.0</version>
4.0.0
@@ -32,6 +32,8 @@
        <module>streamis-job-manager</module>
        <module>streamis-jobmanager-server</module>
        <module>streamis-projectmanager-server</module>
+        <module>streamis-job-log</module>
+        <module>streamis-job-entrypoint</module>
diff --git a/streamis-jobmanager/streamis-job-entrypoint/pom.xml b/streamis-jobmanager/streamis-job-entrypoint/pom.xml
new file mode 100644
index 000000000..ca0f96962
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-entrypoint/pom.xml
@@ -0,0 +1,34 @@
+
+
+
+
+    <parent>
+        <artifactId>streamis-jobmanager</artifactId>
+        <groupId>com.webank.wedatasphere.streamis</groupId>
+        <version>0.3.0</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+    <packaging>pom</packaging>
+    <artifactId>streamis-job-entrypoint</artifactId>
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/pom.xml b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/pom.xml
new file mode 100644
index 000000000..9738941dc
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/pom.xml
@@ -0,0 +1,88 @@
+
+
+
+
+    <parent>
+        <artifactId>streamis-job-entrypoint</artifactId>
+        <groupId>com.webank.wedatasphere.streamis</groupId>
+        <version>0.3.0</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>streamis-job-entrypoint-common</artifactId>
+
+    <properties>
+        <maven.compiler.source>8</maven.compiler.source>
+        <maven.compiler.target>8</maven.compiler.target>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.linkis</groupId>
+            <artifactId>linkis-module</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-test</artifactId>
+            <version>2.3.7.RELEASE</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.linkis</groupId>
+            <artifactId>linkis-rpc</artifactId>
+            <version>1.1.3</version>
+        </dependency>
+        <dependency>
+            <groupId>com.webank.wedatasphere.streamis</groupId>
+            <artifactId>streamis-jobmanager-common</artifactId>
+            <version>0.3.0</version>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>4.13.2</version>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-deploy-plugin</artifactId>
+            </plugin>
+            <plugin>
+                <groupId>net.alchim31.maven</groupId>
+                <artifactId>scala-maven-plugin</artifactId>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-jar-plugin</artifactId>
+            </plugin>
+        </plugins>
+        <resources>
+            <resource>
+                <directory>src/main/java</directory>
+                <includes>
+                    <include>**/*.xml</include>
+                </includes>
+            </resource>
+        </resources>
+        <finalName>${project.artifactId}-${project.version}</finalName>
+    </build>
+</project>
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/entrypoint/StreamJobEntrypoint.java b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/entrypoint/StreamJobEntrypoint.java
new file mode 100644
index 000000000..fcf9af98f
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/entrypoint/StreamJobEntrypoint.java
@@ -0,0 +1,25 @@
+package com.webank.wedatasphere.streamis.jobmanager.entrypoint;
+
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.config.StreamJobConfig;
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.exception.JobHeartbeatException;
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.producer.StreamJobHeartbeatProducer;
+import org.apache.commons.lang3.StringUtils;
+
+public interface StreamJobEntrypoint {
+
+    /**
+     * Register the job and start the heartbeat reporting.
+     * @param config   stream job config
+     * @param producer heartbeat message producer
+     */
+ default void register(StreamJobConfig config, StreamJobHeartbeatProducer producer) throws JobHeartbeatException {
+ throw new JobHeartbeatException(-1, "This method cannot be called, call the method of the subclass");
+ }
+
+ default Boolean checkConfig(StreamJobConfig config) {
+ return (StringUtils.isNoneBlank(config.getApplicationId()) &&
+ StringUtils.isNoneBlank(config.getResourceManagerAddress()) &&
+ StringUtils.isNoneBlank(config.getJobStatus()) &&
+ StringUtils.isNoneBlank(config.getJobName()) &&
+ StringUtils.isNoneBlank(config.getProjectName()));
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/entrypoint/StreamJobHeartbeatListener.java b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/entrypoint/StreamJobHeartbeatListener.java
new file mode 100644
index 000000000..41bc22e2f
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/entrypoint/StreamJobHeartbeatListener.java
@@ -0,0 +1,51 @@
+package com.webank.wedatasphere.streamis.jobmanager.entrypoint;
+
+import java.util.Map;
+import java.util.Timer;
+import java.util.TimerTask;
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ * @author jefftlin
+ *
+ * Server
+ */
+public class StreamJobHeartbeatListener {
+
+    private Map<String, Object> serviceAndPortMap;
+
+ /**
+ * Need to launch
+ */
+ public void init() {
+ serviceAndPortMap = new ConcurrentHashMap<>();
+
+ this.listenFlinkJob();
+ this.listenSparkJob();
+ }
+
+ private void listenFlinkJob() {
+ Timer timer = new Timer("flinkStreamJobHeartbeatListener", true);
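+        // Daemon timer: the first check fires after 1s, then repeats every 10s
+        // (the two arguments of schedule() below)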
+ timer.schedule(new TimerTask() {
+ @Override
+ public void run() {
+ //nothing
+
+
+ }
+ }, 1000, 10000);
+ }
+
+ private void listenSparkJob() {
+ Timer timer = new Timer("sparkStreamJobHeartbeatListener", true);
+ timer.schedule(new TimerTask() {
+ @Override
+ public void run() {
+ //nothing
+
+
+ }
+ }, 1000, 10000);
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/entrypoint/config/StreamJobConfig.java b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/entrypoint/config/StreamJobConfig.java
new file mode 100644
index 000000000..bb3572210
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/entrypoint/config/StreamJobConfig.java
@@ -0,0 +1,77 @@
+package com.webank.wedatasphere.streamis.jobmanager.entrypoint.config;
+
+public class StreamJobConfig {
+
+ private String streamisServerUrl;
+
+ private String applicationId;
+
+ private String resourceManagerAddress;
+
+ private String jobStatus;
+
+ private String jobName;
+
+ private String projectName;
+
+ public String getStreamisServerUrl() {
+ return streamisServerUrl;
+ }
+
+ public void setStreamisServerUrl(String streamisServerUrl) {
+ this.streamisServerUrl = streamisServerUrl;
+ }
+
+ public String getApplicationId() {
+ return applicationId;
+ }
+
+ public void setApplicationId(String applicationId) {
+ this.applicationId = applicationId;
+ }
+
+ public String getResourceManagerAddress() {
+ return resourceManagerAddress;
+ }
+
+ public void setResourceManagerAddress(String resourceManagerAddress) {
+ this.resourceManagerAddress = resourceManagerAddress;
+ }
+
+ public String getJobStatus() {
+ return jobStatus;
+ }
+
+ public void setJobStatus(String jobStatus) {
+ this.jobStatus = jobStatus;
+ }
+
+ public String getJobName() {
+ return jobName;
+ }
+
+ public void setJobName(String jobName) {
+ this.jobName = jobName;
+ }
+
+ public String getProjectName() {
+ return projectName;
+ }
+
+ public void setProjectName(String projectName) {
+ this.projectName = projectName;
+ }
+
+ @Override
+ public String toString() {
+ return "StreamJobConfig{" +
+ "streamisServerUrl='" + streamisServerUrl + '\'' +
+ ", applicationId='" + applicationId + '\'' +
+ ", resourceManagerAddress='" + resourceManagerAddress + '\'' +
+ ", jobStatus='" + jobStatus + '\'' +
+ ", jobName='" + jobName + '\'' +
+ ", projectName='" + projectName + '\'' +
+ '}';
+ }
+}
+
diff --git a/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/entrypoint/exception/JobHeartbeatException.java b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/entrypoint/exception/JobHeartbeatException.java
new file mode 100644
index 000000000..7a9f9cd79
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/entrypoint/exception/JobHeartbeatException.java
@@ -0,0 +1,11 @@
+package com.webank.wedatasphere.streamis.jobmanager.entrypoint.exception;
+
+import org.apache.linkis.common.exception.ErrorException;
+
+public class JobHeartbeatException extends ErrorException {
+
+ public JobHeartbeatException(int errCode, String desc) {
+ super(errCode, desc);
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/config/SenderHttpConfig.scala b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/config/SenderHttpConfig.scala
new file mode 100644
index 000000000..9910a4cd5
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/config/SenderHttpConfig.scala
@@ -0,0 +1,47 @@
+package com.webank.wedatasphere.streamis.jobmanager.entrypoint.config
+
+import java.net.URL
+
+class SenderHttpConfig {
+
+ /**
+ * Request url
+ */
+ private var url: URL = _
+
+ /**
+ * Request type
+ * POST/GET
+ */
+ private var requestType: String = _
+
+ /**
+ * Request headers
+ */
+ private var headers: Map[String, String] = _
+
+ /**
+ * Request body
+ */
+ private var requestBody: String = _
+
+ /**
+ * Timeout of http request
+ */
+ private var requestTimeout : scala.concurrent.duration.Duration = _
+
+ def getUrl: URL = this.url
+ def setUrl(url: URL): Unit = this.url = url
+
+ def getRequestType: String = this.requestType
+ def setRequestType(requestType: String): Unit = this.requestType = requestType
+
+ def getHeaders: Map[String, String] = this.headers
+ def setHeaders(headers: Map[String, String]): Unit = this.headers = headers
+
+ def getRequestBody: String = this.requestBody
+ def setRequestBody(requestBody: String): Unit = this.requestBody = requestBody
+
+ def getRequestTimeout: scala.concurrent.duration.Duration = this.requestTimeout
+ def setRequestTimeout(requestTimeout: scala.concurrent.duration.Duration): Unit = this.requestTimeout = requestTimeout
+}
diff --git a/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/message/JobHeartbeatMessage.scala b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/message/JobHeartbeatMessage.scala
new file mode 100644
index 000000000..38aad896e
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/message/JobHeartbeatMessage.scala
@@ -0,0 +1,26 @@
+package com.webank.wedatasphere.streamis.jobmanager.entrypoint.message
+
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.config.StreamJobConfig
+
+class JobHeartbeatMessage {
+
+ var streamJobConfig: StreamJobConfig = _
+
+ var engineType: String = _
+
+ var engineVersion: String = _
+
+ def getStreamJobConfig: StreamJobConfig = this.streamJobConfig
+
+ def setStreamJobConfig(streamJobConfig: StreamJobConfig): Unit = this.streamJobConfig = streamJobConfig
+
+ def getEngineType: String = this.engineType
+
+ def setEngineType(engineType: String): Unit = this.engineType = engineType
+
+ def getEngineVersion: String = this.engineVersion
+
+ def setEngineVersion(engineVersion: String): Unit = this.engineVersion = engineVersion
+
+
+}
diff --git a/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/producer/StreamJobHeartbeatProducer.scala b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/producer/StreamJobHeartbeatProducer.scala
new file mode 100644
index 000000000..d4f5ce415
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/producer/StreamJobHeartbeatProducer.scala
@@ -0,0 +1,10 @@
+package com.webank.wedatasphere.streamis.jobmanager.entrypoint.producer
+
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.config.StreamJobConfig
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.message.JobHeartbeatMessage
+
+trait StreamJobHeartbeatProducer {
+
+ def produce(streamJobConfig: StreamJobConfig): JobHeartbeatMessage
+
+}
diff --git a/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/sender/StreamJobHeartbeatSender.scala b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/sender/StreamJobHeartbeatSender.scala
new file mode 100644
index 000000000..3433a935f
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/sender/StreamJobHeartbeatSender.scala
@@ -0,0 +1,43 @@
+package com.webank.wedatasphere.streamis.jobmanager.entrypoint.sender
+
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.exception.JobHeartbeatException
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.message.JobHeartbeatMessage
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.utils.HttpClientUtil
+import org.apache.http.client.methods.HttpPost
+import org.apache.http.impl.client.CloseableHttpClient
+import org.apache.linkis.common.utils.Logging
+
+class StreamJobHeartbeatSender extends Logging {
+
+  private var httpClient: CloseableHttpClient = _
+
+  private var postRequest: HttpPost = _
+
+  private var httpClientUtil: HttpClientUtil = _
+
+ def getHttpClient: CloseableHttpClient = this.httpClient
+ def setHttpClient(httpClient: CloseableHttpClient): Unit = this.httpClient = httpClient
+
+ def getPostRequest: HttpPost = this.postRequest
+ def setPostRequest(postRequest: HttpPost): Unit = this.postRequest = postRequest
+
+ def getHttpClientUtil: HttpClientUtil = this.httpClientUtil
+ def setHttpClientUtil(httpClientUtil: HttpClientUtil): Unit = this.httpClientUtil = httpClientUtil
+
+
+ def init(httpClient: CloseableHttpClient, postRequest: HttpPost): Unit = {
+ this.httpClient = httpClient
+ this.postRequest = postRequest
+ this.httpClientUtil = new HttpClientUtil()
+ }
+
+ /**
+ * Send heartbeat request
+ * @return
+ */
+ def send(message: JobHeartbeatMessage): String = {
+ throw new JobHeartbeatException(-1, "This method cannot be called, call the method of the subclass")
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/service/StreamJobHeartbeatService.scala b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/service/StreamJobHeartbeatService.scala
new file mode 100644
index 000000000..3efd55c7e
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-common/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/service/StreamJobHeartbeatService.scala
@@ -0,0 +1,54 @@
+package com.webank.wedatasphere.streamis.jobmanager.entrypoint.service
+
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.message.JobHeartbeatMessage
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.sender.StreamJobHeartbeatSender
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.utils.{HttpClientUtil, RetryUtil}
+import org.apache.linkis.common.conf.{CommonVars, TimeType}
+import org.apache.linkis.common.utils.Utils
+import org.apache.linkis.common.utils.Logging
+
+import java.util.concurrent.{Callable, Future, ThreadPoolExecutor, TimeUnit}
+
+class StreamJobHeartbeatService extends Logging {
+
+ val RETRY_TIMES: CommonVars[Int] = CommonVars("wds.streamis.job.heartbeat.retry.times", 3)
+
+ val RETRY_INTERVAL: CommonVars[Long] = CommonVars("wds.streamis.job.heartbeat.retry.interval", 1000L)
+
+ val EXPONENTIAL: CommonVars[Boolean] = CommonVars("wds.streamis.job.heartbeat.retry.exponential", true)
+
+ val HEARTBEAT_INTERVAL_TIME: CommonVars[Long] = CommonVars("wds.streamis.job.heartbeat.interval.time", 30)
+
+ val JOB_HEARTBEAT_INTERVAL: CommonVars[TimeType] = CommonVars("wds.streamis.task.monitor.interval", new TimeType("1m"))
+
+ var asyncExecutor: ThreadPoolExecutor = null
+
+ def init(): Unit = {
+ asyncExecutor = RetryUtil.createThreadPoolExecutor()
+ }
+
+ /**
+ * Start job heartbeat
+ * @param thread
+ */
+ def start(message: JobHeartbeatMessage, sender: StreamJobHeartbeatSender): Unit = {
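+    // Send the heartbeat at a fixed interval; each tick retries the send asynchronously
+    // via RetryUtil with the configured retry count, interval and optional exponential backoff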
+ Utils.defaultScheduler.scheduleAtFixedRate(new Runnable {
+ override def run(): Unit = Utils.tryAndWarnMsg {
+ RetryUtil.asyncExecuteWithRetry(new Callable[String] {
+ override def call(): String = {
+ print("Call send method")
+ sender.send(message)
+ }
+ }, RETRY_TIMES.value, RETRY_INTERVAL.value, EXPONENTIAL.value, HEARTBEAT_INTERVAL_TIME.value, asyncExecutor)
+ }("Send job heartbeat failed!")
+ }, JOB_HEARTBEAT_INTERVAL.getValue.toLong, JOB_HEARTBEAT_INTERVAL.getValue.toLong, TimeUnit.MILLISECONDS)
+
+
+ Runtime.getRuntime.addShutdownHook(new Thread(new Runnable {
+ override def run(): Unit = {
+ logger.info("Cancel sending heartbeat information, current message is: ",
+ message.getStreamJobConfig.toString)
+ }
+ }))
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-flink/pom.xml b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-flink/pom.xml
new file mode 100644
index 000000000..da4763cf1
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-flink/pom.xml
@@ -0,0 +1,28 @@
+
+
+    <parent>
+        <artifactId>streamis-jobmanager</artifactId>
+        <groupId>com.webank.wedatasphere.streamis</groupId>
+        <version>0.3.0</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>streamis-job-entrypoint-flink</artifactId>
+
+    <properties>
+        <maven.compiler.source>8</maven.compiler.source>
+        <maven.compiler.target>8</maven.compiler.target>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.webank.wedatasphere.streamis</groupId>
+            <artifactId>streamis-job-entrypoint-common</artifactId>
+            <version>0.3.0</version>
+        </dependency>
+    </dependencies>
+
+</project>
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-flink/src/main/java/com/webank/wedatasphere/streamis/jobmanager/entrypoint/FlinkStreamJobEntrypoint.java b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-flink/src/main/java/com/webank/wedatasphere/streamis/jobmanager/entrypoint/FlinkStreamJobEntrypoint.java
new file mode 100644
index 000000000..156367281
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-flink/src/main/java/com/webank/wedatasphere/streamis/jobmanager/entrypoint/FlinkStreamJobEntrypoint.java
@@ -0,0 +1,71 @@
+package com.webank.wedatasphere.streamis.jobmanager.entrypoint;
+
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.config.FlinkStreamJobConfig;
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.config.StreamJobConfig;
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.exception.JobHeartbeatException;
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.message.JobHeartbeatMessage;
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.producer.StreamJobHeartbeatProducer;
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.sender.FlinkStreamJobHeartbeatSender;
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.service.StreamJobHeartbeatService;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.utils.GsonUtil;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.utils.HttpClientUtil;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.entity.StringEntity;
+import org.apache.http.impl.client.CloseableHttpClient;
+
+import java.io.UnsupportedEncodingException;
+import java.util.HashMap;
+import java.util.Map;
+
+public class FlinkStreamJobEntrypoint implements StreamJobEntrypoint {
+
+    /**
+     * Register the Flink job and start the heartbeat reporting.
+     * @param config   stream job config
+     * @param producer heartbeat message producer
+     */
+ @Override
+ public void register(StreamJobConfig config, StreamJobHeartbeatProducer producer) throws JobHeartbeatException {
+ if (!this.checkConfig(config)) {
+ throw new JobHeartbeatException(-1, "Incorrect configuration parameters");
+ }
+
+ // Produce message
+ JobHeartbeatMessage message = producer.produce(config);
+
+ // Create sender and init
+ FlinkStreamJobHeartbeatSender sender = new FlinkStreamJobHeartbeatSender();
+
+ CloseableHttpClient httpClient = HttpClientUtil.createHttpClientUtil(null);
+
+ Map requestBody = new HashMap<>();
+ requestBody.put("message", message);
+        StringEntity entity = null;
+        try {
+            entity = new StringEntity(GsonUtil.toJson(requestBody));
+        } catch (UnsupportedEncodingException e) {
+            // The checked encoding exception cannot be rethrown as-is; wrap it instead
+            throw new JobHeartbeatException(-1, "Failed to encode heartbeat request body: " + e.getMessage());
+        }
+ entity.setContentEncoding("UTF-8");
+ entity.setContentType("application/json");
+ HttpPost postRequest = HttpClientUtil.getPostRequest(config.getStreamisServerUrl(), entity);
+
+ sender.init(httpClient, postRequest);
+
+ // Send job heartbeat
+ StreamJobHeartbeatService service = new StreamJobHeartbeatService();
+ service.init();
+ service.start(message, sender);
+
+ }
+
+ @Override
+ public Boolean checkConfig(StreamJobConfig config) {
+ if (config instanceof FlinkStreamJobConfig) {
+ return super.checkConfig(config) &&
+ StringUtils.isNoneBlank(((FlinkStreamJobConfig) config).getApplicationUrl()) &&
+ StringUtils.isNoneBlank(((FlinkStreamJobConfig) config).getJobId());
+ }
+ return false;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-flink/src/main/java/com/webank/wedatasphere/streamis/jobmanager/entrypoint/config/FlinkStreamJobConfig.java b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-flink/src/main/java/com/webank/wedatasphere/streamis/jobmanager/entrypoint/config/FlinkStreamJobConfig.java
new file mode 100644
index 000000000..80a713dba
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-flink/src/main/java/com/webank/wedatasphere/streamis/jobmanager/entrypoint/config/FlinkStreamJobConfig.java
@@ -0,0 +1,24 @@
+package com.webank.wedatasphere.streamis.jobmanager.entrypoint.config;
+
+public class FlinkStreamJobConfig extends StreamJobConfig {
+
+ private String applicationUrl;
+
+ private String jobId;
+
+ public String getApplicationUrl() {
+ return applicationUrl;
+ }
+
+ public void setApplicationUrl(String applicationUrl) {
+ this.applicationUrl = applicationUrl;
+ }
+
+ public String getJobId() {
+ return jobId;
+ }
+
+ public void setJobId(String jobId) {
+ this.jobId = jobId;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-flink/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/producer/FlinkStreamJobHeartbeatProducer.scala b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-flink/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/producer/FlinkStreamJobHeartbeatProducer.scala
new file mode 100644
index 000000000..a9103e360
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-flink/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/producer/FlinkStreamJobHeartbeatProducer.scala
@@ -0,0 +1,15 @@
+package com.webank.wedatasphere.streamis.jobmanager.entrypoint.producer
+
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.config.StreamJobConfig
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.message.JobHeartbeatMessage
+
+class FlinkStreamJobHeartbeatProducer extends StreamJobHeartbeatProducer {
+
+ override def produce(streamJobConfig: StreamJobConfig): JobHeartbeatMessage = {
+    val message: JobHeartbeatMessage = new JobHeartbeatMessage
+ message.setStreamJobConfig(streamJobConfig)
+ message.setEngineType("flink")
+ message.setEngineVersion("1.12.2")
+ message
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-flink/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/sender/FlinkStreamJobHeartbeatSender.scala b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-flink/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/sender/FlinkStreamJobHeartbeatSender.scala
new file mode 100644
index 000000000..127d502f1
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-flink/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/sender/FlinkStreamJobHeartbeatSender.scala
@@ -0,0 +1,19 @@
+package com.webank.wedatasphere.streamis.jobmanager.entrypoint.sender
+
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.message.JobHeartbeatMessage
+
+class FlinkStreamJobHeartbeatSender extends StreamJobHeartbeatSender {
+
+ /**
+ * Send heartbeat request
+ *
+ * @return
+ */
+ override def send(message: JobHeartbeatMessage): String = {
+ // Heartbeat request
+ val result = this.getHttpClientUtil.executeAndGet(this.getHttpClient, this.getPostRequest, classOf[String])
+ result
+
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-spark/pom.xml b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-spark/pom.xml
new file mode 100644
index 000000000..bd3528f2f
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-spark/pom.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>streamis-jobmanager</artifactId>
+        <groupId>com.webank.wedatasphere.streamis</groupId>
+        <version>0.3.0</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>streamis-job-entrypoint-spark</artifactId>
+
+    <properties>
+        <maven.compiler.source>8</maven.compiler.source>
+        <maven.compiler.target>8</maven.compiler.target>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.webank.wedatasphere.streamis</groupId>
+            <artifactId>streamis-job-entrypoint-common</artifactId>
+            <version>0.3.0</version>
+        </dependency>
+    </dependencies>
+</project>
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-spark/src/main/java/com/webank/wedatasphere/streamis/jobmanager/entrypoint/SparkStreamJobEntrypoint.java b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-spark/src/main/java/com/webank/wedatasphere/streamis/jobmanager/entrypoint/SparkStreamJobEntrypoint.java
new file mode 100644
index 000000000..2507e8508
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-spark/src/main/java/com/webank/wedatasphere/streamis/jobmanager/entrypoint/SparkStreamJobEntrypoint.java
@@ -0,0 +1,70 @@
+package com.webank.wedatasphere.streamis.jobmanager.entrypoint;
+
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.config.SparkStreamJobConfig;
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.config.StreamJobConfig;
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.exception.JobHeartbeatException;
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.message.JobHeartbeatMessage;
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.producer.StreamJobHeartbeatProducer;
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.sender.SparkStreamJobHeartbeatSender;
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.service.StreamJobHeartbeatService;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.utils.GsonUtil;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.utils.HttpClientUtil;
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.entity.StringEntity;
+import org.apache.http.impl.client.CloseableHttpClient;
+
+import java.io.UnsupportedEncodingException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
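+/**
+ * Spark counterpart of FlinkStreamJobEntrypoint; the notable difference is that
+ * the HTTP client is created with security properties from HttpClientUtil.
+ */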
+public class SparkStreamJobEntrypoint extends StreamJobEntrypoint {
+
+ /**
+     * Register the stream job and start reporting heartbeats to the Streamis server.
+     * @param config stream job configuration
+     * @param producer heartbeat message producer
+     * @throws JobHeartbeatException if the configuration is invalid or the request cannot be built
+ */
+ @Override
+ public void register(StreamJobConfig config, StreamJobHeartbeatProducer producer) throws JobHeartbeatException {
+ if (!this.checkConfig(config)) {
+ throw new JobHeartbeatException(-1, "Incorrect configuration parameters");
+ }
+
+ // Produce message
+ JobHeartbeatMessage message = producer.produce(config);
+
+ // Create sender and init
+ SparkStreamJobHeartbeatSender sender = new SparkStreamJobHeartbeatSender();
+
+ Properties prop = HttpClientUtil.getSecurityProperties();
+ CloseableHttpClient httpClient = HttpClientUtil.createHttpClientUtil(prop);
+
+        Map<String, Object> requestBody = new HashMap<>();
+ requestBody.put("message", message);
+ StringEntity entity = null;
+ try {
+ entity = new StringEntity(GsonUtil.toJson(requestBody));
+ } catch (UnsupportedEncodingException e) {
+            throw new JobHeartbeatException(-1, "Failed to encode heartbeat request body: " + e.getMessage());
+ }
+ entity.setContentEncoding("UTF-8");
+ entity.setContentType("application/json");
+ HttpPost postRequest = HttpClientUtil.getPostRequest(config.getStreamisServerUrl(), entity);
+
+ sender.init(httpClient, postRequest);
+
+ // Send job heartbeat
+ StreamJobHeartbeatService service = new StreamJobHeartbeatService();
+ service.init();
+ service.start(message, sender);
+
+ }
+
+ @Override
+ public Boolean checkConfig(StreamJobConfig config) {
+ if (config instanceof SparkStreamJobConfig) {
+ return super.checkConfig(config);
+ }
+ return false;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-spark/src/main/java/com/webank/wedatasphere/streamis/jobmanager/entrypoint/config/SparkStreamJobConfig.java b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-spark/src/main/java/com/webank/wedatasphere/streamis/jobmanager/entrypoint/config/SparkStreamJobConfig.java
new file mode 100644
index 000000000..6e2a71f58
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-spark/src/main/java/com/webank/wedatasphere/streamis/jobmanager/entrypoint/config/SparkStreamJobConfig.java
@@ -0,0 +1,5 @@
+package com.webank.wedatasphere.streamis.jobmanager.entrypoint.config;
+
+public class SparkStreamJobConfig extends StreamJobConfig {
+
+}
diff --git a/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-spark/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/producer/SparkStreamJobHeartbeatProducer.scala b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-spark/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/producer/SparkStreamJobHeartbeatProducer.scala
new file mode 100644
index 000000000..dfadf61a6
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-spark/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/producer/SparkStreamJobHeartbeatProducer.scala
@@ -0,0 +1,14 @@
+package com.webank.wedatasphere.streamis.jobmanager.entrypoint.producer
+
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.config.StreamJobConfig
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.message.JobHeartbeatMessage
+
+class SparkStreamJobHeartbeatProducer extends StreamJobHeartbeatProducer {
+
+ override def produce(streamJobConfig: StreamJobConfig): JobHeartbeatMessage = {
+    val message: JobHeartbeatMessage = new JobHeartbeatMessage
+ message.setStreamJobConfig(streamJobConfig)
+ message.setEngineType("spark")
+ message
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-spark/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/sender/SparkStreamJobHeartbeatSender.scala b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-spark/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/sender/SparkStreamJobHeartbeatSender.scala
new file mode 100644
index 000000000..02bfbf100
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-entrypoint/streamis-job-entrypoint-spark/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/entrypoint/sender/SparkStreamJobHeartbeatSender.scala
@@ -0,0 +1,20 @@
+package com.webank.wedatasphere.streamis.jobmanager.entrypoint.sender
+
+import com.webank.wedatasphere.streamis.jobmanager.entrypoint.message.JobHeartbeatMessage
+
+class SparkStreamJobHeartbeatSender extends StreamJobHeartbeatSender {
+
+
+ /**
+ * Send heartbeat request
+ *
+ * @return
+ */
+ override def send(message: JobHeartbeatMessage): String = {
+
+ // Heartbeat request
+ val result = this.getHttpClientUtil.executeAndGet(this.getHttpClient, this.getPostRequest, classOf[String])
+ result
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/pom.xml b/streamis-jobmanager/streamis-job-launcher/pom.xml
index 8964504e1..96e618d27 100755
--- a/streamis-jobmanager/streamis-job-launcher/pom.xml
+++ b/streamis-jobmanager/streamis-job-launcher/pom.xml
@@ -20,7 +20,7 @@
        <artifactId>streamis-jobmanager</artifactId>
        <groupId>com.webank.wedatasphere.streamis</groupId>
-        <version>0.2.0</version>
+        <version>0.3.0</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
@@ -29,7 +29,7 @@
        <module>streamis-job-launcher-base</module>
-        <module>streamis-job-launcher-service</module>
        <module>streamis-job-launcher-linkis</module>
+        <module>streamis-job-launcher-service</module>
    </modules>
</project>
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/pom.xml b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/pom.xml
index 00f62af34..d38fad2ad 100755
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/pom.xml
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/pom.xml
@@ -20,7 +20,7 @@
        <artifactId>streamis-jobmanager</artifactId>
        <groupId>com.webank.wedatasphere.streamis</groupId>
-        <version>0.2.0</version>
+        <version>0.3.0</version>
        <relativePath>../../pom.xml</relativePath>
    </parent>
    <modelVersion>4.0.0</modelVersion>
@@ -37,6 +37,13 @@
            <groupId>org.apache.linkis</groupId>
            <artifactId>linkis-common</artifactId>
+            <version>${linkis.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.linkis</groupId>
+            <artifactId>linkis-computation-governance-common</artifactId>
+            <version>${linkis.version}</version>
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobConfDefinition.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobConfDefinition.java
index 9b58e797c..83409e3cf 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobConfDefinition.java
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobConfDefinition.java
@@ -1,6 +1,6 @@
/*
* Copyright 2021 WeBank
- * Licensed under the Apache License, Version 2.0 (the "License");
+ * Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
@@ -99,6 +99,8 @@ public class JobConfDefinition {
*/
private boolean required;
+ private boolean mark;
+
public JobConfDefinition(){
}
@@ -238,4 +240,12 @@ public boolean isRequired() {
public void setRequired(boolean required) {
this.required = required;
}
+
+ public boolean isMark() {
+ return mark;
+ }
+
+ public void setMark(boolean mark) {
+ this.mark = mark;
+ }
}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobUser.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobUser.java
index acb9cfafa..8a5cde7d3 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobUser.java
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobUser.java
@@ -1,6 +1,6 @@
/*
* Copyright 2021 WeBank
- * Licensed under the Apache License, Version 2.0 (the "License");
+ * Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobUserRole.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobUserRole.java
index 616bdd04b..c78ad3f73 100755
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobUserRole.java
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/JobUserRole.java
@@ -1,6 +1,6 @@
/*
* Copyright 2021 WeBank
- * Licensed under the Apache License, Version 2.0 (the "License");
+ * Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/JobConfDefinitionVo.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/JobConfDefinitionVo.java
index 1529a326f..8ea1b5bd6 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/JobConfDefinitionVo.java
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/JobConfDefinitionVo.java
@@ -8,7 +8,6 @@
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import java.util.Optional;
/**
* According to JobConfDefinition
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/YarnAppVo.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/YarnAppVo.java
new file mode 100644
index 000000000..074678c39
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/entity/vo/YarnAppVo.java
@@ -0,0 +1,66 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.entity.vo;
+
+public class YarnAppVo {
+
+ /*
+    Keep consistent with org.apache.linkis.governance.common.constant.ec.ECConstants
+ */
+
+ private String applicationId;
+
+ private String applicationUrl;
+
+ private String applicationState;
+
+ private String applicationName;
+
+ private String yarnAppType;
+
+ public YarnAppVo() {}
+
+ public YarnAppVo(String appId, String appUrl, String appState) {
+ this.setApplicationId(appId);
+ this.setApplicationUrl(appUrl);
+ this.setApplicationState(appState);
+ }
+
+ public String getApplicationId() {
+ return applicationId;
+ }
+
+ public void setApplicationId(String applicationId) {
+ this.applicationId = applicationId;
+ }
+
+ public String getApplicationUrl() {
+ return applicationUrl;
+ }
+
+ public void setApplicationUrl(String applicationUrl) {
+ this.applicationUrl = applicationUrl;
+ }
+
+ public String getApplicationState() {
+ return applicationState;
+ }
+
+ public void setApplicationState(String applicationState) {
+ this.applicationState = applicationState;
+ }
+
+ public String getApplicationName() {
+ return applicationName;
+ }
+
+ public void setApplicationName(String applicationName) {
+ this.applicationName = applicationName;
+ }
+
+ public String getYarnAppType() {
+ return yarnAppType;
+ }
+
+ public void setYarnAppType(String yarnAppType) {
+ this.yarnAppType = yarnAppType;
+ }
+}
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/enums/FlinkManagerActionType.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/enums/FlinkManagerActionType.java
new file mode 100644
index 000000000..2fb6c02b9
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/enums/FlinkManagerActionType.java
@@ -0,0 +1,30 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.enums;
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.conf.StreamJobLauncherConf;
+import org.apache.linkis.governance.common.enums.OnceJobOperationBoundary;
+
+public enum FlinkManagerActionType {
+
+ STATUS("status"),
+ KILL("kill"),
+ SAVE("doSavepoint"),
+ LIST("list");
+
+    private final String name;
+
+ FlinkManagerActionType(String name) {
+ this.name = name;
+ }
+
+ public String getName() {
+ return name;
+ }
+
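+    // Actions configured as "private" must run on the EngineConn itself (EC boundary);
+    // the rest can be handled by the EngineConnManager (ECM boundary).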
+ public static OnceJobOperationBoundary getOperationBoundary(FlinkManagerActionType actionType) {
+ if (StreamJobLauncherConf.isPrivateAction(actionType)) {
+ return OnceJobOperationBoundary.EC;
+ } else {
+ return OnceJobOperationBoundary.ECM;
+ }
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/enums/JobClientType.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/enums/JobClientType.java
new file mode 100644
index 000000000..b431d14d7
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/enums/JobClientType.java
@@ -0,0 +1,33 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.enums;
+
+public enum JobClientType {
+
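+    // Semantics inferred from the value strings: "attach" keeps the client bound to the
+    // engine session, while the "detach" variants leave the engine running independently.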
+ ATTACH("attach"),
+ DETACH("detach"),
+ DETACH_STANDALONE("detachStandalone"),
+
+ OTHER("other");
+
+    private final String name;
+
+ JobClientType(String name) {
+ this.name = name;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+    public static JobClientType toJobClientType(String s) {
+ if ("attach".equalsIgnoreCase(s)) {
+ return ATTACH;
+ } else if ("detach".equalsIgnoreCase(s)) {
+ return DETACH;
+ } else if ("detachStandalone".equalsIgnoreCase(s)) {
+ return DETACH_STANDALONE;
+ } else {
+ // default
+ return ATTACH;
+ }
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/FlinkManagerClient.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/FlinkManagerClient.scala
new file mode 100644
index 000000000..2a79177ce
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/FlinkManagerClient.scala
@@ -0,0 +1,74 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.job
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.enums.FlinkManagerActionType
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.constants.JobConstants
+import org.apache.linkis.common.ServiceInstance
+import org.apache.linkis.governance.common.enums.OnceJobOperationBoundary
+
+import java.util
+
+trait FlinkManagerClient {
+
+ def setFlinkManagerEngineConnInstance(ecInstance: ServiceInstance): Unit
+
+ def getFlinkManagerEngineConnInstance(): ServiceInstance
+
+ def setFlinkManagerECMInstance(ecmInstance: ServiceInstance): Unit
+
+ def getFlinkManagerECMInstance(): ServiceInstance
+
+ def executeAction(action: FlinkManagerAction): Any
+
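+  // Intent inferred from the name: re-resolve or restart the shared flink-manager
+  // EngineConn when the cached instance becomes unavailable.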
+ def refreshManagerEC(): Unit
+}
+
+trait FlinkManagerAction {
+
+ private var params: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef]()
+ private var executeUser: String = _
+  private var payloads: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef]()
+
+ def getApplicationId: String
+
+ def getMsg: String
+
+ def getActionType: FlinkManagerActionType
+
+  def getOperationBoundary: OnceJobOperationBoundary
+
+ def getParams(): util.Map[String, AnyRef] = params
+
+ def setParams(params: util.Map[String, AnyRef]): FlinkManagerAction = {
+ this.params = params
+ this
+ }
+
+ def getExecuteUser: String = executeUser
+
+  def setExecuteUser(user: String): FlinkManagerAction = {
+    this.executeUser = user
+    this
+  }
+
+  def setECInstance(ecInstance: ServiceInstance): FlinkManagerAction = {
+    getPayloads().put(JobConstants.APP_NAME_KEY, ecInstance.getApplicationName)
+    getPayloads().put(JobConstants.INSTANCE_KEY, ecInstance.getInstance)
+    this
+  }
+
+  def setPayloads(payloads: util.Map[String, AnyRef]): FlinkManagerAction = {
+    this.payloads = payloads
+    this
+  }
+
+  def getPayloads(): util.Map[String, AnyRef] = payloads
+
+ def build(): Any /* ={
+ val params = getParams()
+ params.put(JobConstants.APPLICATION_ID_KEY, getApplicationId)
+ params.put(JobConstants.MSG_KEY, getMsg)
+ params.put(JobConstants.FLINK_MANAGER_OPERATION_TYPE_KEY, getActionType.toString)
+ params
+ }*/
+}
+
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/JobClient.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/JobClient.scala
index bcb1dfbbc..57b4e59fe 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/JobClient.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/JobClient.scala
@@ -29,5 +29,11 @@ trait JobClient[T <: JobInfo] {
*/
def stop(): Unit
+ def init(): Unit = {
+ //Done
+ }
+ def handshake(): Unit = {
+ //Done
+ }
}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/JobInfo.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/JobInfo.scala
index 6cde5ba12..36ca4e7b3 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/JobInfo.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/JobInfo.scala
@@ -15,7 +15,7 @@
package com.webank.wedatasphere.streamis.jobmanager.launcher.job
-import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.{JobState, JobStateInfo}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobStateInfo
/**
* Basic job information
@@ -63,4 +63,20 @@ trait JobInfo {
*/
def getJobStates: Array[JobStateInfo]
+ /**
+ * Engine type
+ * @return
+ */
+ def getEngineType: String
+
+ /**
+ * Engine version
+ * @return
+ */
+ def getEngineVersion: String
+ /**
+ * Client type
+ * @return
+ */
+ def getClientType: String
}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/LaunchJob.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/LaunchJob.scala
index b18d56091..4f09fa6ee 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/LaunchJob.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/LaunchJob.scala
@@ -28,15 +28,15 @@ trait LaunchJob {
def getSubmitUser: String
- def getLabels: util.Map[String, Any]
+ def getLabels: util.Map[String, AnyRef]
- def getJobContent: util.Map[String, Any]
+ def getJobContent: util.Map[String, AnyRef]
- def getParams: util.Map[String, Any]
+ def getParams: util.Map[String, AnyRef]
- def getSource: util.Map[String, Any]
+ def getSource: util.Map[String, AnyRef]
- def getLaunchConfigs: util.Map[String, Any]
+ def getLaunchConfigs: util.Map[String, AnyRef]
}
@@ -51,11 +51,11 @@ object LaunchJob {
class Builder {
private var submitUser: String = _
private var jobName: String = _
- private var labels: util.Map[String, Any] = _
- private var jobContent: util.Map[String, Any] = _
- private var params: util.Map[String, Any] = _
- private var source: util.Map[String, Any] = _
- private var launchConfigs: util.Map[String, Any] = _
+ private var labels: util.Map[String, AnyRef] = _
+ private var jobContent: util.Map[String, AnyRef] = _
+ private var params: util.Map[String, AnyRef] = _
+ private var source: util.Map[String, AnyRef] = _
+ private var launchConfigs: util.Map[String, AnyRef] = _
def setJobName(jobName: String): this.type = {
this.jobName = jobName
@@ -67,27 +67,27 @@ object LaunchJob {
this
}
- def setLabels(labels: util.Map[String, Any]): this.type = {
+ def setLabels(labels: util.Map[String, AnyRef]): this.type = {
this.labels = labels
this
}
- def setJobContent(jobContent: util.Map[String, Any]): this.type = {
+ def setJobContent(jobContent: util.Map[String, AnyRef]): this.type = {
this.jobContent = jobContent
this
}
- def setParams(param: util.Map[String, Any]): this.type = {
+ def setParams(param: util.Map[String, AnyRef]): this.type = {
this.params = param
this
}
- def setSource(source: util.Map[String, Any]): this.type = {
+ def setSource(source: util.Map[String, AnyRef]): this.type = {
this.source = source
this
}
- def setLaunchConfigs(launchConfigs: util.Map[String, Any]): this.type = {
+ def setLaunchConfigs(launchConfigs: util.Map[String, AnyRef]): this.type = {
this.launchConfigs = launchConfigs
this
}
@@ -101,15 +101,15 @@ object LaunchJob {
def build(): LaunchJob = new LaunchJob {
override def getSubmitUser: String = submitUser
- override def getLabels: util.Map[String, Any] = labels
+ override def getLabels: util.Map[String, AnyRef] = labels
- override def getJobContent: util.Map[String, Any] = jobContent
+ override def getJobContent: util.Map[String, AnyRef] = jobContent
- override def getParams: util.Map[String, Any] = params
+ override def getParams: util.Map[String, AnyRef] = params
- override def getSource: util.Map[String, Any] = source
+ override def getSource: util.Map[String, AnyRef] = source
- override def getLaunchConfigs: util.Map[String, Any] = launchConfigs
+ override def getLaunchConfigs: util.Map[String, AnyRef] = launchConfigs
override def toString: String = s"LaunchJob(submitUser: $submitUser, labels: $labels, jobContent: $jobContent, params: $params, source: $source)"
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/conf/JobConf.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/conf/JobConf.scala
similarity index 61%
rename from streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/conf/JobConf.scala
rename to streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/conf/JobConf.scala
index 89cab6f92..f89be371e 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/conf/JobConf.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/conf/JobConf.scala
@@ -1,23 +1,7 @@
-/*
- * Copyright 2021 WeBank
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.webank.wedatasphere.streamis.jobmanager.manager.conf
-
-import org.apache.linkis.common.conf.{CommonVars, TimeType}
-import com.webank.wedatasphere.streamis.jobmanager.manager.exception.JobExecuteErrorException
+package com.webank.wedatasphere.streamis.jobmanager.launcher.job.conf
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.exception.JobExecuteErrorException
+import org.apache.linkis.common.conf.{CommonVars, Configuration, TimeType}
object JobConf {
@@ -27,21 +11,38 @@ object JobConf {
val STREAMIS_JOB_MONITOR_ENABLE: CommonVars[Boolean] = CommonVars("wds.streamis.job.monitor.enable", true)
- val FLINK_JOB_STATUS_NOT_STARTED: CommonVars[Int] = CommonVars("wds.streamis.job.status.not-started", 0,"Not Started")
+ val STREAMIS_JOB_PARAM_BLANK_PLACEHOLDER: CommonVars[String] = CommonVars("wds.streamis.job.param.blank.placeholder", "\u0001")
- val FLINK_JOB_STATUS_COMPLETED: CommonVars[Int] = CommonVars("wds.streamis.job.status.completed", 1,"Completed")
+ /**
+ * Gateway for stream job log module
+ */
+ val STREAMIS_JOB_LOG_GATEWAY: CommonVars[String] = CommonVars("wds.streamis.job.log.gateway", Configuration.getGateWayURL())
- val FLINK_JOB_STATUS_WAIT_RESTART: CommonVars[Int] = CommonVars("wds.streamis.job.status.wait-restart", 2,"Wait for restart")
+ /**
+ * Path for collecting stream job log
+ */
+ val STREAMIS_JOB_LOG_COLLECT_PATH: CommonVars[String] = CommonVars("wds.streamis.job.log.collect.path", "/api/rest_j/v1/streamis/streamJobManager/log/collect/events")
- val FLINK_JOB_STATUS_ALERT_RUNNING: CommonVars[Int] = CommonVars("wds.streamis.job.status.alert-running", 3,"Alert running")
+ /**
+ * Enable to use material container
+ */
+ val STREAMIS_JOB_MATERIAL_CONTAINER_ENABLE: CommonVars[Boolean] = CommonVars("wds.streamis.job.material.container.enable", false)
- val FLINK_JOB_STATUS_SLOW_RUNNING: CommonVars[Int] = CommonVars("wds.streamis.job.status.slow-running", 4,"Slow running")
+ val FLINK_JOB_STATUS_NOT_STARTED: CommonVars[Int] = CommonVars("wds.streamis.job.status.not-started", 0, "Not Started")
- val FLINK_JOB_STATUS_RUNNING: CommonVars[Int] = CommonVars("wds.streamis.job.status.running", 5,"running")
+ val FLINK_JOB_STATUS_COMPLETED: CommonVars[Int] = CommonVars("wds.streamis.job.status.completed", 1, "Completed")
- val FLINK_JOB_STATUS_FAILED: CommonVars[Int] = CommonVars("wds.streamis.job.status.failed", 6,"Failed")
+ val FLINK_JOB_STATUS_WAIT_RESTART: CommonVars[Int] = CommonVars("wds.streamis.job.status.wait-restart", 2, "Wait for restart")
- val FLINK_JOB_STATUS_STOPPED: CommonVars[Int] = CommonVars("wds.streamis.job.status.stopped", 7,"Stopped")
+ val FLINK_JOB_STATUS_ALERT_RUNNING: CommonVars[Int] = CommonVars("wds.streamis.job.status.alert-running", 3, "Alert running")
+
+ val FLINK_JOB_STATUS_SLOW_RUNNING: CommonVars[Int] = CommonVars("wds.streamis.job.status.slow-running", 4, "Slow running")
+
+ val FLINK_JOB_STATUS_RUNNING: CommonVars[Int] = CommonVars("wds.streamis.job.status.running", 5, "running")
+
+ val FLINK_JOB_STATUS_FAILED: CommonVars[Int] = CommonVars("wds.streamis.job.status.failed", 6, "Failed")
+
+ val FLINK_JOB_STATUS_STOPPED: CommonVars[Int] = CommonVars("wds.streamis.job.status.stopped", 7, "Stopped")
/**
* Starting (middle status, before scheduling)
@@ -82,4 +83,7 @@ object JobConf {
val TASK_SUBMIT_TIME_MAX: CommonVars[TimeType] = CommonVars("wds.streamis.task.submit.time.max", new TimeType("5m"))
+ val SUPPORTED_JOB_TYPES: CommonVars[String] = CommonVars("wds.streamis.supported.job.types", "flink.jar,flink.sql,spark.jar")
+
+ val SUPPORTED_MANAGEMENT_JOB_TYPES: CommonVars[String] = CommonVars("wds.streamis.management.supported.job.types", "flink.jar,flink.sql")
}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/conf/StreamJobLauncherConf.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/conf/StreamJobLauncherConf.scala
new file mode 100644
index 000000000..9459e5a44
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/conf/StreamJobLauncherConf.scala
@@ -0,0 +1,14 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.job.conf
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.enums.FlinkManagerActionType
+import org.apache.linkis.common.conf.CommonVars
+
+object StreamJobLauncherConf {
+
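+  // By default every FlinkManagerActionType is treated as a "private" (EC-boundary)
+  // action; deployments can narrow the list via this configuration key.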
+  val FLINK_MANAGER_ACTION_BOUNDARY_PRIVATE_ACTIONS: CommonVars[String] = CommonVars("wds.streamis.flink.manager.action.boundary.private.actions", FlinkManagerActionType.values().map(_.toString).mkString(","))
+
+ def isPrivateAction(actionType: FlinkManagerActionType): Boolean = {
+ FLINK_MANAGER_ACTION_BOUNDARY_PRIVATE_ACTIONS.getValue.split(",").contains(actionType.toString)
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/constants/JobConstants.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/constants/JobConstants.scala
new file mode 100644
index 000000000..ba120d15e
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/constants/JobConstants.scala
@@ -0,0 +1,34 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.job.constants
+
+object JobConstants {
+
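+  // Parameter and result keys exchanged with the Linkis flink-manager EngineConn operations.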
+ val APPLICATION_ID_KEY = "applicationId"
+
+ val FLINK_MANAGER_OPERATION_TYPE_KEY = "operationType"
+
+ val FLINK_OPERATION_BOUNDARY_KEY = "operationBoundary"
+
+ val EC_INSTANCE_KEY = "ecInstance"
+
+ val EC_TICKET_ID_KEY = "ticketId"
+
+ val MSG_KEY = "msg"
+
+ val APP_NAME_KEY = "applicationName"
+
+ val INSTANCE_KEY = "instance"
+
+  val SAVEPOINT_PATH_KEY = "savepointPath"
+
+ val MODE_KEY = "mode"
+
+ val RESULT_SAVEPOINT_PATH_KEY = "writtenSavepoint"
+
+ val RESULT_EC_METRICS_KEY = "ecMetrics"
+
+ val APP_TYPE_FLINK = "Apache Flink"
+
+ val APP_TYPE_SPARK = "SPARK"
+
+ val JOB_NAME_DELIMITER = ","
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/errorcode/JobLaunchErrorCode.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/errorcode/JobLaunchErrorCode.scala
new file mode 100644
index 000000000..2555996af
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/errorcode/JobLaunchErrorCode.scala
@@ -0,0 +1,26 @@
+
+
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.job.errorcode
+
+object JobLaunchErrorCode {
+
+ val JOB_PARAM_ERROR_CODE = 30505
+
+ val JOB_EC_ERROR_CODE = 30506
+
+ val JOB_EC_STATUS_FETCH_ERROR = 30507
+
+ val JOB_EC_SAVEPOINT_ERROR = 30508
+
+ val JOB_EC_KILL_ERROR = 30509
+
+ val JOB_EC_METRICS_ERROR = 30510
+
+ val JOB_EC_HANDSHAKE_ERROR = 30511
+
+ val JOB_LIST_YARN_APP_ERROR = 30512
+
+ val MANAGER_EC_OPERATE_ERROR = 30513
+
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/exception/JobErrorException.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/exception/JobErrorException.scala
similarity index 95%
rename from streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/exception/JobErrorException.scala
rename to streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/exception/JobErrorException.scala
index c6dd208d1..eb08127f1 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/exception/JobErrorException.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/exception/JobErrorException.scala
@@ -13,7 +13,7 @@
* limitations under the License.
*/
-package com.webank.wedatasphere.streamis.jobmanager.manager.exception
+package com.webank.wedatasphere.streamis.jobmanager.launcher.job.exception
import org.apache.linkis.common.exception.ErrorException
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/manager/JobLaunchManager.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/manager/JobLaunchManager.scala
index 1a1365dfc..1a2a77a3b 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/manager/JobLaunchManager.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/manager/JobLaunchManager.scala
@@ -4,6 +4,8 @@ import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{JobClient, JobInfo, LaunchJob}
import java.util.concurrent.ConcurrentHashMap
+import java.util
+import scala.collection.JavaConverters._
/**
* Basic job manager interface for launching job
@@ -66,4 +68,11 @@ object JobLaunchManager{
def getJobManager(name: String): JobLaunchManager[_ <: JobInfo] = {
launchManagers.get(name)
}
+
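+  // Java-friendly snapshot of every launch manager currently registered.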
+ def getJobManagers: util.List[JobLaunchManager[_ <: JobInfo]] = {
+ launchManagers.asScala.map{
+ case (_, manager) =>
+ manager
+ }.toList.asJava
+ }
}
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobGenericState.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobGenericState.scala
new file mode 100644
index 000000000..c460c5475
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobGenericState.scala
@@ -0,0 +1,62 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.job.state
+
+import java.net.URI
+
+/**
+ * Generic job state
+ * @param location location
+ */
+class JobGenericState(location: String) extends JobState {
+
+ /**
+ * Restore flag
+ */
+ private var restore: Boolean = false
+
+ private var timestamp: Long = -1
+
+ private var id: String = "{ID}"
+
+ private var metadataInfo: Any = _
+
+ override def getLocation: URI = URI.create(location)
+
+ override def getMetadataInfo: Any = {
+ metadataInfo
+ }
+
+ def setMetadataInfo(metadataInfo: Any): Unit = {
+ this.metadataInfo = metadataInfo
+ }
+
+ /**
+ * Job state id
+ *
+ * @return
+ */
+ override def getId: String = id
+
+ def setId(id: String): Unit = {
+ this.id = id
+ }
+ /**
+ * Timestamp to save the state
+ *
+ * @return
+ */
+ override def getTimestamp: Long = timestamp
+
+ def setTimestamp(timestamp: Long): Unit = {
+ this.timestamp = timestamp
+ }
+
+ /**
+ * If need to restore
+ * @return
+ */
+ override def isRestore: Boolean = this.restore
+
+ override def setToRestore(restore: Boolean): Unit = {
+ this.restore = restore
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobState.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobState.scala
index b05df81c2..9b4c4dd76 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobState.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobState.scala
@@ -30,4 +30,12 @@ trait JobState {
* @return
*/
def getTimestamp: Long
+
+ /**
+ * If need to restore
+ * @return
+ */
+ def isRestore: Boolean
+
+ def setToRestore(restore: Boolean): Unit
}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobStateInfo.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobStateInfo.scala
index 84e29bf73..dfded4813 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobStateInfo.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/state/JobStateInfo.scala
@@ -1,5 +1,7 @@
package com.webank.wedatasphere.streamis.jobmanager.launcher.job.state
+import org.apache.linkis.common.utils.JsonUtils
+
/**
* Basic info
*/
@@ -14,6 +16,18 @@ class JobStateInfo {
*/
private var timestamp: Long = -1
+ /**
+ * Mark if the state is restored
+ */
+ private var restore: Boolean = false
+
+ def this(location: String, timestamp: Long, restore: Boolean) {
+ this()
+ this.location = location
+ this.timestamp = timestamp
+ this.restore = restore
+ }
+
def setLocation(location: String): Unit = {
this.location = location
}
@@ -25,7 +39,14 @@ class JobStateInfo {
def setTimestamp(timestamp: Long): Unit = {
this.timestamp = timestamp
}
+
def getTimestamp: Long = {
timestamp
}
+
+ def isRestore: Boolean = this.restore
+
+ def setRestore(restore: Boolean): Unit = {
+ this.restore = restore
+ }
}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/utils/JobUtils.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/utils/JobUtils.scala
new file mode 100644
index 000000000..cc866f7fe
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-base/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/job/utils/JobUtils.scala
@@ -0,0 +1,7 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.job.utils
+
+object JobUtils {
+
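+  // Uses the implicit Manifest to test whether T is an AnyVal (primitive-like) type;
+  // the value argument itself is unused, only the type evidence matters.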
+  def isAnyVal[T](x: T)(implicit m: Manifest[T]): Boolean = m <:< manifest[AnyVal]
+
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/pom.xml b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/pom.xml
index fdd13095d..c036da26d 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/pom.xml
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/pom.xml
@@ -20,7 +20,7 @@
        <artifactId>streamis-job-launcher</artifactId>
        <groupId>com.webank.wedatasphere.streamis</groupId>
-        <version>0.2.0</version>
+        <version>0.3.0</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>
@@ -42,6 +42,17 @@
            <artifactId>linkis-computation-client</artifactId>
            <version>${linkis.version}</version>
        </dependency>
+        <dependency>
+            <groupId>org.apache.linkis</groupId>
+            <artifactId>linkis-manager-common</artifactId>
+            <version>${linkis.version}</version>
+        </dependency>
+
+
+
+
+
+
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/AbstractLinkisJobStateFetcher.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/AbstractLinkisJobStateFetcher.java
index 4a423326a..fcdc1c584 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/AbstractLinkisJobStateFetcher.java
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/AbstractLinkisJobStateFetcher.java
@@ -1,7 +1,7 @@
/*
* Copyright 2021 WeBank
- * Licensed under the Apache License, Version 2.0 (the "License");
+ * Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
@@ -19,7 +19,6 @@
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager;
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState;
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobStateFetcher;
-import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration;
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.FlinkJobStateFetchException;
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.StreamisJobLaunchException;
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.client.StateFileTree;
@@ -36,13 +35,11 @@
import org.slf4j.LoggerFactory;
import java.io.IOException;
-import java.net.URI;
import java.util.HashMap;
-import java.util.List;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicReference;
-import java.util.function.Function;
+import java.util.function.Predicate;
/**
@@ -70,13 +67,13 @@ public abstract class AbstractLinkisJobStateFetcher<T extends JobState> implements JobStateFetcher<T> {
/**
* Http Client
*/
- Client client;
+ protected Client client;
    private final Class<T> stateClass;
private final JobStateManager jobStateManager;
-    public AbstractLinkisJobStateFetcher(Class<T> stateClass, JobStateManager jobStateManager){
+    protected AbstractLinkisJobStateFetcher(Class<T> stateClass, JobStateManager jobStateManager){
this.stateClass = stateClass;
this.jobStateManager = jobStateManager;
}
@@ -130,12 +127,12 @@ public void destroy() {
* @param resolved resolved
* @return
*/
-    private StateFileTree traverseFileTreeToFind(JobInfo jobInfo, StateFileTree stateFileTree, Function<String, Boolean> matcher,
+    private StateFileTree traverseFileTreeToFind(JobInfo jobInfo, StateFileTree stateFileTree, Predicate<String> matcher,
boolean resolved){
        AtomicReference<StateFileTree> latestFileTree = new AtomicReference<>(new StateFileTree());
if (Objects.nonNull(stateFileTree)){
if (!resolved && stateFileTree.getIsLeaf()){
- if (matcher.apply(stateFileTree.getPath()) && compareTime(stateFileTree, latestFileTree.get()) > 0){
+ if (matcher.test(stateFileTree.getPath()) && compareTime(stateFileTree, latestFileTree.get()) > 0){
latestFileTree.set(stateFileTree);
}
} else if (!stateFileTree.getIsLeaf()){
@@ -145,7 +142,7 @@ private StateFileTree traverseFileTreeToFind(JobInfo jobInfo, StateFileTree stat
Objects.nonNull(childStateFileTree.getChildren())? childStateFileTree : getDirFileTree(jobInfo, childStateFileTree.getPath()),
matcher,
true);
- if (compareTime(candidateFileTree, latestFileTree.get()) > 0 && matcher.apply(candidateFileTree.getPath())){
+ if (compareTime(candidateFileTree, latestFileTree.get()) > 0 && matcher.test(candidateFileTree.getPath())){
latestFileTree.set(candidateFileTree);
}
}));
@@ -206,7 +203,8 @@ private void checkFetchStateResult(DWSResult result) throws FlinkJobStateFetchEx
* @return size
*/
private long compareTime(StateFileTree leftTree, StateFileTree rightTree){
- long leftTime = 0L,rightTime = 0L;
+ long leftTime = 0L;
+ long rightTime = 0L;
try {
leftTime = Long.parseLong(Optional.ofNullable(leftTree.getProperties()).orElse(new HashMap<>()).getOrDefault(PROPS_MODIFY_TIME, "0"));
} catch (NumberFormatException e){
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/CheckpointJobStateFetcher.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/FlinkCheckpointJobStateFetcher.java
similarity index 83%
rename from streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/CheckpointJobStateFetcher.java
rename to streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/FlinkCheckpointJobStateFetcher.java
index 377c525c3..c5fc0f736 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/CheckpointJobStateFetcher.java
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/FlinkCheckpointJobStateFetcher.java
@@ -23,7 +23,6 @@
import java.net.URI;
import java.net.URISyntaxException;
-import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.JobStateConf.CHECKPOINT_PATH_PATTERN;
@@ -31,13 +30,13 @@
/**
* Checkpoint JobState Fetcher
*/
-public class CheckpointJobStateFetcher extends AbstractLinkisJobStateFetcher<Checkpoint> {
+public class FlinkCheckpointJobStateFetcher extends AbstractLinkisJobStateFetcher<FlinkCheckpoint> {
- private static final Logger LOG = LoggerFactory.getLogger(CheckpointJobStateFetcher.class);
+ private static final Logger LOG = LoggerFactory.getLogger(FlinkCheckpointJobStateFetcher.class);
private static final Pattern PATH_PATTERN = Pattern.compile(CHECKPOINT_PATH_PATTERN.getValue());
-    public CheckpointJobStateFetcher(Class<Checkpoint> stateClass, JobStateManager jobStateManager) {
+    public FlinkCheckpointJobStateFetcher(Class<FlinkCheckpoint> stateClass, JobStateManager jobStateManager) {
super(stateClass, jobStateManager);
}
@@ -47,8 +46,7 @@ protected boolean isMatch(String path) {
}
@Override
- public Checkpoint getState(JobStateFileInfo fileInfo) {
- // TODO from linkis will lost the authority info
+ public FlinkCheckpoint getState(JobStateFileInfo fileInfo) {
URI location = URI.create(fileInfo.getPath());
if (StringUtils.isBlank(location.getAuthority()) &&
StringUtils.isNotBlank(JobLauncherConfiguration.FLINK_STATE_DEFAULT_AUTHORITY().getValue())){
@@ -59,7 +57,7 @@ public Checkpoint getState(JobStateFileInfo fileInfo) {
throw new StreamisJobLaunchException.Runtime(-1, "Fail to resolve checkpoint location, message: " + e.getMessage(), e);
}
}
- Checkpoint checkpoint = new Checkpoint(location.toString());
+ FlinkCheckpoint checkpoint = new FlinkCheckpoint(location.toString());
checkpoint.setMetadataInfo(fileInfo);
checkpoint.setTimestamp(fileInfo.getModifytime());
LOG.info("Checkpoint info is [path: {}, timestamp: {}]" ,checkpoint.getLocation(), checkpoint.getTimestamp());
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/SavepointJobStateFetcher.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/FlinkSavepointJobStateFetcher.java
similarity index 84%
rename from streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/SavepointJobStateFetcher.java
rename to streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/FlinkSavepointJobStateFetcher.java
index 69dbb51d0..f70b3f6b9 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/SavepointJobStateFetcher.java
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/FlinkSavepointJobStateFetcher.java
@@ -30,13 +30,13 @@
/**
* Savepoint JobState Fetcher
*/
-public class SavepointJobStateFetcher extends AbstractLinkisJobStateFetcher<Savepoint>{
+public class FlinkSavepointJobStateFetcher extends AbstractLinkisJobStateFetcher<FlinkSavepoint>{
- private static final Logger LOG = LoggerFactory.getLogger(CheckpointJobStateFetcher.class);
+    private static final Logger LOG = LoggerFactory.getLogger(FlinkSavepointJobStateFetcher.class);
private static final Pattern PATH_PATTERN = Pattern.compile(SAVEPOINT_PATH_PATTERN.getValue());
-    public SavepointJobStateFetcher(Class<Savepoint> stateClass, JobStateManager jobStateManager) {
+    public FlinkSavepointJobStateFetcher(Class<FlinkSavepoint> stateClass, JobStateManager jobStateManager) {
super(stateClass, jobStateManager);
}
@@ -46,8 +46,7 @@ protected boolean isMatch(String path) {
}
@Override
- protected Savepoint getState(JobStateFileInfo fileInfo) {
- // TODO from linkis will lost the authority info
+ protected FlinkSavepoint getState(JobStateFileInfo fileInfo) {
URI location = URI.create(fileInfo.getPath());
if (StringUtils.isBlank(location.getAuthority()) &&
StringUtils.isNotBlank(JobLauncherConfiguration.FLINK_STATE_DEFAULT_AUTHORITY().getValue())){
@@ -58,7 +57,7 @@ protected Savepoint getState(JobStateFileInfo fileInfo) {
throw new StreamisJobLaunchException.Runtime(-1, "Fail to resolve checkpoint location, message: " + e.getMessage(), e);
}
}
- Savepoint savepoint = new Savepoint(location.toString());
+ FlinkSavepoint savepoint = new FlinkSavepoint(location.toString());
savepoint.setMetadataInfo(fileInfo);
savepoint.setTimestamp(fileInfo.getModifytime());
LOG.info("Savepoint info is [path: {}, timestamp: {}]", savepoint.getLocation(), savepoint.getTimestamp());
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/JobStateConf.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/JobStateConf.java
index be4f27ee0..6c22ed2f8 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/JobStateConf.java
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/JobStateConf.java
@@ -21,6 +21,8 @@
*/
public class JobStateConf {
+ private JobStateConf() {}
+
public static final CommonVars<String> CHECKPOINT_PATH_PATTERN = CommonVars.apply("wds.streamis.job.state.checkpoint.path-pattern", "^[\\s\\S]+?/\\w+?/chk-\\d+/_metadata$");
public static final CommonVars<String> SAVEPOINT_PATH_PATTERN = CommonVars.apply("wds.streamis.job.state.savepoint.path-pattern", "^[\\s\\S]+?/savepoint-[\\w-]+/_metadata$");
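Both defaults anchor on the `_metadata` file, which Flink writes only once a snapshot is complete. A small sketch exercising these patterns against hypothetical HDFS paths:

```java
import java.util.regex.Pattern;

public class StatePathPatternSketch {
    public static void main(String[] args) {
        // The default patterns from JobStateConf above
        Pattern checkpoint = Pattern.compile("^[\\s\\S]+?/\\w+?/chk-\\d+/_metadata$");
        Pattern savepoint = Pattern.compile("^[\\s\\S]+?/savepoint-[\\w-]+/_metadata$");

        // true: a completed checkpoint directory with its _metadata file
        System.out.println(checkpoint.matcher("hdfs://ns1/flink/ckpt/0a1b2c3d/chk-42/_metadata").matches());
        // true: savepoint directories carry a savepoint-<id> segment
        System.out.println(savepoint.matcher("hdfs://ns1/flink/sp/savepoint-0a1b2c-deadbeef/_metadata").matches());
        // false: data files inside the snapshot are not state entry points
        System.out.println(checkpoint.matcher("hdfs://ns1/flink/ckpt/0a1b2c3d/chk-42/part-0").matches());
    }
}
```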
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/LinkisJobStateResult.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/LinkisJobStateResult.java
index 48c6d7a6a..57dfd6328 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/LinkisJobStateResult.java
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/LinkisJobStateResult.java
@@ -32,6 +32,14 @@ public class LinkisJobStateResult extends AbstractJobStateResult {
private Map<String, StateFileTree> dirFileTrees = new HashMap<>();
+ public Map<String, StateFileTree> getDirFileTrees() {
+ return dirFileTrees;
+ }
+
+ public void setDirFileTrees(Map<String, StateFileTree> dirFileTrees) {
+ this.dirFileTrees = dirFileTrees;
+ }
+
/**
* Convert the result data to state file tree
* @return state file tree
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/StateFileTree.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/StateFileTree.java
index 3844970c4..e193aeb66 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/StateFileTree.java
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/client/StateFileTree.java
@@ -1,13 +1,13 @@
package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.client;
-import java.util.HashMap;
import java.util.List;
+import java.util.Map;
public class StateFileTree {
private String name;
private String path;
- private HashMap<String, String> properties;
+ private Map<String, String> properties;
private List<StateFileTree> children;
private Boolean isLeaf = false;
private String parentPath;
@@ -44,11 +44,11 @@ public void setPath(String path) {
this.path = path;
}
- public HashMap<String, String> getProperties() {
+ public Map<String, String> getProperties() {
return properties;
}
- public void setProperties(HashMap<String, String> properties) {
+ public void setProperties(Map<String, String> properties) {
this.properties = properties;
}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/url/LinkisURLStreamHandlerFactory.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/url/LinkisURLStreamHandlerFactory.java
index 78701f646..1352c8b28 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/url/LinkisURLStreamHandlerFactory.java
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/url/LinkisURLStreamHandlerFactory.java
@@ -15,7 +15,6 @@
package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.url;
-import org.apache.commons.lang.StringUtils;
import java.net.URLStreamHandler;
import java.net.URLStreamHandlerFactory;
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/utils/GsonUtil.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/utils/GsonUtil.java
new file mode 100644
index 000000000..5fbccd425
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/utils/GsonUtil.java
@@ -0,0 +1,72 @@
+/*
+ *
+ * Copyright 2020 WeBank
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.utils;
+
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.reflect.TypeToken;
+
+import java.util.List;
+
+public class GsonUtil {
+
+ private GsonUtil() {}
+
+ private static Gson gson;
+ static{
+ GsonBuilder builder = new GsonBuilder();
+ gson = builder.enableComplexMapKeySerialization()
+ .setPrettyPrinting()
+ .create();
+ }
+
+ /**
+ * Convenience wrapper around gson.fromJson(json, type); a JSON array is
+ * deserialized as a List of the given class.
+ * @param json json string
+ * @param clazz target (or list element) type
+ * @param <T> actual return type
+ * @return deserialized object
+ */
+ public static <T> T fromJson(String json, Class<?> clazz) {
+ if (json.startsWith("[") && json.endsWith("]")) {
+ return gson.fromJson(json, TypeToken.getParameterized(List.class, clazz).getType());
+ }
+ return gson.fromJson(json, TypeToken.getParameterized(clazz).getType());
+ }
+
+ /**
+ * Convenience wrapper around gson.fromJson(json, type) for parameterized types.
+ * @param json json string
+ * @param rawClass raw class
+ * @param genericArguments generic arguments
+ * @param <T> actual return type
+ * @return deserialized object
+ */
+ public static <T> T fromJson(String json, Class<?> rawClass, Class<?>... genericArguments) {
+ return gson.fromJson(json, TypeToken.getParameterized(rawClass, genericArguments).getType());
+ }
+
+ /**
+ * Convenience wrapper around gson.toJson(src).
+ * @param src source obj
+ * @return json
+ */
+ public static String toJson(Object src){
+ return gson.toJson(src);
+ }
+}
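A usage sketch for the helper above; `AppInfo` is a hypothetical POJO added only for illustration. Note the array heuristic: a string starting with `[` and ending with `]` is deserialized as a `List` of the given class:

```java
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.utils.GsonUtil;

import java.util.List;
import java.util.Map;

public class GsonUtilUsageSketch {
    // Hypothetical POJO used only for this example
    static class AppInfo {
        String name;
    }

    public static void main(String[] args) {
        // A single JSON object deserializes to the class itself
        AppInfo one = GsonUtil.fromJson("{\"name\":\"chk-1\"}", AppInfo.class);
        // A JSON array comes back as a List of the class
        List<AppInfo> many = GsonUtil.fromJson("[{\"name\":\"chk-1\"},{\"name\":\"chk-2\"}]", AppInfo.class);
        System.out.println(one.name + ", list size: " + many.size());
        // The varargs overload builds parameterized types, e.g. Map<String, String>
        Map<String, String> kv = GsonUtil.fromJson("{\"k\":\"v\"}", Map.class, String.class, String.class);
        System.out.println(kv.get("k"));
        // Round-trip with the pretty printing enabled by the builder above
        System.out.println(GsonUtil.toJson(many));
    }
}
```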
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/utils/HttpClientUtil.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/utils/HttpClientUtil.java
new file mode 100644
index 000000000..f0a5d1229
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/utils/HttpClientUtil.java
@@ -0,0 +1,187 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.utils;
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.exception.JobErrorException;
+import org.apache.http.Consts;
+import org.apache.http.HttpEntity;
+import org.apache.http.HttpStatus;
+import org.apache.http.auth.AuthScope;
+import org.apache.http.auth.UsernamePasswordCredentials;
+import org.apache.http.client.CredentialsProvider;
+import org.apache.http.client.config.RequestConfig;
+import org.apache.http.client.methods.*;
+import org.apache.http.impl.client.BasicCredentialsProvider;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.http.util.EntityUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Objects;
+import java.util.Properties;
+import java.util.concurrent.ThreadPoolExecutor;
+
+/**
+ * @author jefftlin
+ * @create 2022-12-23
+ **/
+public class HttpClientUtil {
+
+ private HttpClientUtil() {}
+
+ private static final Logger logger = LoggerFactory.getLogger(HttpClientUtil.class);
+
+
+ private static String SPARK_SECRET_PATH = "";
+
+ private static int HTTP_TIMEOUT_IN_MILLISECONDS = 5000;
+
+ private static final int POOL_SIZE = 20;
+
+ private static ThreadPoolExecutor asyncExecutor = RetryUtil.createThreadPoolExecutor();
+
+ private static Properties properties;
+
+ public static void setHttpTimeoutInMilliSeconds(int httpTimeoutInMilliSeconds) {
+ HTTP_TIMEOUT_IN_MILLISECONDS = httpTimeoutInMilliSeconds;
+ }
+
+ /**
+ * Create a pooled http client; if prop is non-null, basic credentials are read
+ * from its "auth.key" / "auth.pass" entries.
+ *
+ * @param prop credential properties, may be null
+ * @return a pooled CloseableHttpClient
+ */
+ public static synchronized CloseableHttpClient createHttpClientUtil(Properties prop) {
+ CredentialsProvider provider = new BasicCredentialsProvider();
+ if (Objects.nonNull(prop)) {
+ provider.setCredentials(AuthScope.ANY,
+ new UsernamePasswordCredentials(prop.getProperty("auth.key"), prop.getProperty("auth.pass")));
+ }
+
+ RequestConfig requestConfig = RequestConfig.custom().setSocketTimeout(HTTP_TIMEOUT_IN_MILLISECONDS)
+ .setConnectTimeout(HTTP_TIMEOUT_IN_MILLISECONDS).setConnectionRequestTimeout(HTTP_TIMEOUT_IN_MILLISECONDS)
+ .setStaleConnectionCheckEnabled(true).build();
+
+ return HttpClientBuilder.create().setMaxConnTotal(POOL_SIZE).setMaxConnPerRoute(POOL_SIZE)
+ .setDefaultRequestConfig(requestConfig).setDefaultCredentialsProvider(provider).build();
+ }
+
+ public static void destroy(CloseableHttpClient httpClient) {
+ destroyApacheHttpClient(httpClient);
+ }
+
+ /**
+ * Destroy
+ *
+ */
+ public static void destroyApacheHttpClient(CloseableHttpClient httpClient) {
+ try {
+ if (httpClient != null) {
+ httpClient.close();
+ }
+ } catch (IOException e) {
+ logger.error(e.getMessage(), e);
+ }
+ }
+
+ public static HttpGet getGetRequest() {
+ return new HttpGet();
+ }
+
+ public static HttpPost getPostRequest(String uri, HttpEntity entity, String... headers) {
+ HttpPost httpPost = new HttpPost(uri);
+ httpPost.setEntity(entity);
+ if (headers.length % 2 == 0) {
+ for (int i = 0; i + 1 < headers.length; i += 2) {
+ httpPost.addHeader(headers[i], headers[i + 1]);
+ }
+ }
+ return httpPost;
+ }
+
+ public static HttpPut getPutRequest() {
+ return new HttpPut();
+ }
+
+ public static HttpDelete getDeleteRequest() {
+ return new HttpDelete();
+ }
+
+ /**
+ * Synchronous execution
+ *
+ * @param httpClient client to execute with
+ * @param httpRequestBase request to execute
+ * @param type expected response type; String returns the raw body
+ * @param <T> response type
+ * @return deserialized response
+ * @throws IOException on transport failure or a non-200 status
+ */
+ public static <T> T executeAndGet(CloseableHttpClient httpClient, HttpRequestBase httpRequestBase, Class<T> type) throws IOException {
+ return httpClient.execute(httpRequestBase, httpResponse -> {
+ if (httpResponse.getStatusLine().getStatusCode() != HttpStatus.SC_OK) {
+ logger.info("Request path: " + httpRequestBase.getURI() + ", method:" + httpRequestBase.getMethod()
+ + ",STATUS CODE = " + httpResponse.getStatusLine().getStatusCode());
+ httpRequestBase.abort();
+ throw new RuntimeException("Response Status Code : " + httpResponse.getStatusLine().getStatusCode());
+ } else {
+ HttpEntity entity = httpResponse.getEntity();
+ if (entity != null) {
+ String entityString = EntityUtils.toString(entity, Consts.UTF_8);
+ if(type.equals(String.class)){
+ return (T)entityString;
+ }
+ return GsonUtil.fromJson(entityString, type);
+ } else {
+ throw new RuntimeException("Response Entity Is Null");
+ }
+ }
+ });
+ }
+
+ /**
+ * Asynchronous execution with retry
+ *
+ * @param httpClient client to execute with
+ * @param httpRequestBase request to execute
+ * @param type expected response type
+ * @param retryTimes max number of attempts
+ * @param retryInterval sleep between attempts, in milliseconds
+ * @param <T> response type
+ * @return deserialized response
+ */
+ public static <T> T executeAndGetWithRetry(CloseableHttpClient httpClient, final HttpRequestBase httpRequestBase,
+ Class<T> type, final int retryTimes, final long retryInterval) throws JobErrorException {
+ try {
+ return RetryUtil.asyncExecuteWithRetry(() -> executeAndGet(httpClient, httpRequestBase, type),
+ retryTimes, retryInterval, true, HTTP_TIMEOUT_IN_MILLISECONDS + 1000L, asyncExecutor);
+ } catch (Exception e) {
+ throw new JobErrorException(-1,e.getMessage());
+ }
+ }
+
+ public static synchronized Properties getSecurityProperties() throws JobErrorException {
+ if (properties == null) {
+ properties = new Properties();
+ try (InputStream secretStream = new FileInputStream(SPARK_SECRET_PATH)) {
+ properties.load(secretStream);
+ } catch (FileNotFoundException e) {
+ throw new JobErrorException(-1, "Spark configuration requires encryption/decryption, but the secret key file cannot be found");
+ } catch (IOException e) {
+ throw new JobErrorException(-1, "Failed to read the encryption/decryption configuration file");
+ }
+ }
+
+ return properties;
+ }
+}
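A hedged usage sketch of the utility above; the endpoint URL, header name, and token are placeholders and not part of this patch. It shows the create / execute-with-retry / destroy lifecycle:

```java
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.utils.HttpClientUtil;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;

public class HttpClientUtilUsageSketch {
    public static void main(String[] args) throws Exception {
        // null properties: no basic credentials are attached
        CloseableHttpClient client = HttpClientUtil.createHttpClientUtil(null);
        try {
            StringEntity body = new StringEntity(
                    "{\"applicationId\":\"application_1671000000000_0001\"}", ContentType.APPLICATION_JSON);
            // Header varargs are consumed as key/value pairs
            HttpPost post = HttpClientUtil.getPostRequest(
                    "http://flink-jobmanager:8081/jobs", body, "Token-Code", "dummy-token");
            // Up to 3 attempts, 1s apart, each attempt bounded by the shared timeout + 1s
            String response = HttpClientUtil.executeAndGetWithRetry(client, post, String.class, 3, 1000L);
            System.out.println(response);
        } finally {
            HttpClientUtil.destroyApacheHttpClient(client);
        }
    }
}
```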
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/utils/RetryUtil.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/utils/RetryUtil.java
new file mode 100644
index 000000000..18c520ced
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/utils/RetryUtil.java
@@ -0,0 +1,218 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.utils;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+import java.util.concurrent.*;
+
+/**
+ * @author jefftlin
+ * @create 2022-12-23
+ **/
+public class RetryUtil {
+
+ private RetryUtil() {
+ }
+
+ private static final Logger LOG = LoggerFactory.getLogger(RetryUtil.class);
+
+ private static final long MAX_SLEEP_MILLISECOND = 256L * 1000L;
+
+ /**
+ * Utility method: execute with retries.
+ *
+ * @param callable the actual logic
+ * @param retryTimes max number of attempts (>1)
+ * @param sleepTimeInMilliSecond how long to sleep after a failure before retrying
+ * @param exponential whether the sleep time grows exponentially
+ * @param <T> return type
+ * @return the result of the callable once it succeeds
+ */
+ public static <T> T executeWithRetry(Callable<T> callable,
+ int retryTimes,
+ long sleepTimeInMilliSecond,
+ boolean exponential) throws Exception {
+ Retry retry = new Retry();
+ return retry.doRetry(callable, retryTimes, sleepTimeInMilliSecond, exponential, null);
+ }
+
+ /**
+ * Utility method: execute with retries.
+ *
+ * @param callable the actual logic
+ * @param retryTimes max number of attempts (>1)
+ * @param sleepTimeInMilliSecond how long to sleep after a failure before retrying
+ * @param exponential whether the sleep time grows exponentially
+ * @param <T> return type
+ * @param retryExceptionClasss retry only when one of these exception types is thrown
+ * @return the result of the callable once it succeeds
+ */
+ public static <T> T executeWithRetry(Callable<T> callable,
+ int retryTimes,
+ long sleepTimeInMilliSecond,
+ boolean exponential,
+ List<Class<?>> retryExceptionClasss) throws Exception {
+ Retry retry = new Retry();
+ return retry.doRetry(callable, retryTimes, sleepTimeInMilliSecond, exponential, retryExceptionClasss);
+ }
+
+ /**
+ * Execute on an external thread and retry. Each attempt must finish within timeoutMs,
+ * otherwise it is treated as a failure.
+ * The thread pool for the async execution is passed in from outside, so the caller
+ * decides how it is shared; for example, HttpClientUtil shares a single pool.
+ *
+ * Limitation: the worker thread can only be interrupted while it is blocking.
+ *
+ * @param callable the actual logic
+ * @param retryTimes max number of attempts (>1)
+ * @param sleepTimeInMilliSecond how long to sleep after a failure before retrying
+ * @param exponential whether the sleep time grows exponentially
+ * @param timeoutMs timeout of a single attempt, in milliseconds
+ * @param executor thread pool for the async execution
+ * @param <T> return type
+ * @return the result of the callable once it succeeds
+ */
+ public static <T> T asyncExecuteWithRetry(Callable<T> callable,
+ int retryTimes,
+ long sleepTimeInMilliSecond,
+ boolean exponential,
+ long timeoutMs,
+ ThreadPoolExecutor executor) throws Exception {
+ Retry retry = new AsyncRetry(timeoutMs, executor);
+ return retry.doRetry(callable, retryTimes, sleepTimeInMilliSecond, exponential, null);
+ }
+
+ /**
+ * Create the thread pool for async execution. Characteristics:
+ * core size is 0, so no threads exist initially and there is no idle cost;
+ * max size is 5, so at most five threads run;
+ * threads idle for more than 60 seconds are reclaimed;
+ * a SynchronousQueue is used, so tasks never queue: a submit only succeeds when a
+ * thread is available, otherwise a RejectedExecutionException is thrown.
+ *
+ * @return the thread pool
+ */
+ public static ThreadPoolExecutor createThreadPoolExecutor() {
+ return new ThreadPoolExecutor(0, 5,
+ 60L, TimeUnit.SECONDS,
+ new SynchronousQueue<>());
+ }
+
+
+ private static class Retry {
+
+ public <T> T doRetry(Callable<T> callable, int retryTimes, long sleepTimeInMilliSecond, boolean exponential, List<Class<?>> retryExceptionClasss)
+ throws Exception {
+
+ if (null == callable) {
+ throw new IllegalArgumentException("Programming error: the callable argument must not be null!");
+ }
+ if (retryTimes < 1) {
+ throw new IllegalArgumentException(String.format("Programming error: retryTimes [%d] must not be less than 1!", retryTimes));
+ }
+ Exception saveException = null;
+ for (int i = 0; i < retryTimes; i++) {
+ try {
+ return call(callable);
+ } catch (Exception e) {
+ saveException = e;
+ if (i == 0) {
+ LOG.error(String.format("Exception when calling callable, message: %s", saveException.getMessage()), saveException);
+ }
+ checkRetryException(retryExceptionClasss, saveException);
+ realTimeSleep(i, retryTimes, sleepTimeInMilliSecond, exponential, e);
+ }
+ }
+ throw saveException;
+ }
+
+ private static void realTimeSleep(int i, long retryTimes, long sleepTimeInMilliSecond, boolean exponential, Exception e){
+ if (i + 1 < retryTimes && sleepTimeInMilliSecond > 0) {
+ long startTime = System.currentTimeMillis();
+ long timeToSleep = timeToSleep(exponential, sleepTimeInMilliSecond, i);
+ try {
+ Thread.sleep(timeToSleep);
+ } catch (InterruptedException ignored) {
+ Thread.currentThread().interrupt();
+ }
+ long realTimeSleep = System.currentTimeMillis() - startTime;
+ LOG.error(String.format("Exception when calling callable, about to start retry %s; planned wait [%s] ms, actual wait [%s] ms, message: [%s]",
+ i + 1, timeToSleep, realTimeSleep, e.getMessage()));
+ }
+ }
+
+ private static void checkRetryException(List<Class<?>> retryExceptionClasss, Exception saveException) throws Exception {
+ if (null != retryExceptionClasss && !retryExceptionClasss.isEmpty()) {
+ boolean needRetry = false;
+ for (Class<?> eachExceptionClass : retryExceptionClasss) {
+ if (eachExceptionClass == saveException.getClass()) {
+ needRetry = true;
+ break;
+ }
+ }
+ if (!needRetry) {
+ throw saveException;
+ }
+ }
+ }
+
+ private static Long timeToSleep(boolean exponential, long sleepTimeInMilliSecond, int i) {
+ long timeToSleep = exponential ? sleepTimeInMilliSecond * (long) Math.pow(2, i) : sleepTimeInMilliSecond;
+ return Math.min(timeToSleep, MAX_SLEEP_MILLISECOND);
+ }
+
+ protected <T> T call(Callable<T> callable) throws Exception {
+ return callable.call();
+ }
+ }
+
+
+ private static class AsyncRetry extends Retry {
+
+ private final long timeoutMs;
+ private final ThreadPoolExecutor executor;
+
+ public AsyncRetry(long timeoutMs, ThreadPoolExecutor executor) {
+ this.timeoutMs = timeoutMs;
+ this.executor = executor;
+ }
+
+ /**
+ * Run the task asynchronously on the given thread pool and wait for it.
+ *
+ * future.get() waits for the given number of milliseconds; if the task finishes
+ * within the timeout, its result is returned normally.
+ * If an exception is thrown (timeout, execution failure, or cancel/interrupt from
+ * another thread), it is logged and rethrown.
+ * In both cases the task is checked for completion and cancelled if still pending;
+ * cancel(true) interrupts the thread even if the task is already running.
+ *
+ * @param callable the actual logic
+ * @param <T> return type
+ * @return the task result
+ * @throws Exception on timeout or failure
+ */
+ @Override
+ protected <T> T call(Callable<T> callable) throws Exception {
+ Future<T> future = executor.submit(callable);
+ try {
+ return future.get(timeoutMs, TimeUnit.MILLISECONDS);
+ } catch (Exception e) {
+ LOG.warn("Try once failed", e);
+ throw e;
+ } finally {
+ if (!future.isDone()) {
+ future.cancel(true);
+ LOG.warn("Try once task not done, cancel it, active count: " + executor.getActiveCount());
+ }
+ }
+ }
+ }
+
+}
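A short sketch of the retry semantics above: with `exponential=true` the sleep doubles on each retry (100 ms, then 200 ms, capped at 256 s), and the async variant bounds every attempt by a timeout on a shared pool:

```java
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.utils.RetryUtil;

import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.atomic.AtomicInteger;

public class RetryUtilUsageSketch {
    public static void main(String[] args) throws Exception {
        AtomicInteger attempts = new AtomicInteger();
        // Fails twice, succeeds on the third attempt; sleeps 100ms then 200ms in between
        String result = RetryUtil.executeWithRetry(() -> {
            if (attempts.incrementAndGet() < 3) {
                throw new IllegalStateException("transient failure " + attempts.get());
            }
            return "ok after " + attempts.get() + " attempts";
        }, 5, 100L, true);
        System.out.println(result);

        // Async variant: each attempt must complete within 2s on the shared pool
        ThreadPoolExecutor pool = RetryUtil.createThreadPoolExecutor();
        String asyncResult = RetryUtil.asyncExecuteWithRetry(() -> "done", 3, 100L, false, 2000L, pool);
        System.out.println(asyncResult);
        pool.shutdown();
    }
}
```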
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/conf/JobLauncherConfiguration.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/conf/JobLauncherConfiguration.scala
index 812475016..2e35a7317 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/conf/JobLauncherConfiguration.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/conf/JobLauncherConfiguration.scala
@@ -1,6 +1,7 @@
package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf
import org.apache.linkis.common.conf.CommonVars
+import org.apache.linkis.governance.common.conf.GovernanceCommonConf
/**
* Job Launcher configuration
@@ -49,4 +50,27 @@ object JobLauncherConfiguration {
*/
val VAR_FLINK_APP_NAME: CommonVars[String] = CommonVars("wds.streamis.launch.variable.flink.app.name", "flink.app.name")
+ val FLINK_MANAGER_EC_SUBMIT_USER: CommonVars[String] = CommonVars("wds.streamis.launch.manager.ec.submit.user", "hadoop")
+
+ val FLINK_MANAGER_EC_SUBMIT_CREATOR : CommonVars[String] = CommonVars("wds.streamis.launch.manager.ec.submit.creator", "Streamis")
+
+ val FLINK_MANAGER_EC_TENANT: CommonVars[String] = CommonVars("wds.streamis.launch.manager.ec.tenant", null)
+
+ val FLINK_ONCE_JOB_MAX_SUBMIT_TIME_MILLS: CommonVars[Long] = CommonVars("wds.streamis.launch.oncejob.max_submit_time.mills", 300000)
+
+ val FLINK_MANAGER_EC_REFRESH_INTERVAL: CommonVars[Long] = CommonVars("wds.streamis.launch.manager.ec.refresh.interval.mills", 10 * 60 * 1000)
+
+ val MANAGER_MODE_KEY: CommonVars[String] = CommonVars("wds.streamis.job.manager.mode.key", GovernanceCommonConf.EC_APP_MANAGE_MODE.key)
+
+ val FLINK_MANAGER_EC_KEY: CommonVars[String] = CommonVars("linkis.flink.manager.ec.key", "linkis.flink.manager.mode.on")
+
+ val ENABLE_FLINK_MANAGER_EC_ENABLE: CommonVars[Boolean] = CommonVars("wds.streamis.flink.manager.ec.enable", true)
+
+ val FLINKK_MANAGER_EXIT_TIME: CommonVars[Long] = CommonVars("wds.streamis.flink.manager.ec.expire.time.mills", 12 * 3600 * 1000)
+
+ val LINKIS_EC_EXPIRE_TIME_KEY: CommonVars[String] = CommonVars("linkis.ec.expire.key", "wds.linkis.engineconn.max.free.time")
+
+ val ENABLE_FLINK_LIST_INSPECT: CommonVars[Boolean] = CommonVars("wds.streamis.job.inspect.list.enable", true)
+
+ val LINKIS_EC_SUPPORT_PARALLEM = "wds.linkis.engineconn.support.parallelism"
}
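Like the existing variables, each new `CommonVars` key can be overridden in the Linkis/Streamis properties files; otherwise the default applies. A minimal Java-side read of two of the new values, relying on the same Scala-object interop already used elsewhere in this patch:

```java
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration;

public class JobLauncherConfSketch {
    public static void main(String[] args) {
        // Defaults apply unless keys such as wds.streamis.launch.manager.ec.submit.user
        // are overridden in the server properties
        String submitUser = JobLauncherConfiguration.FLINK_MANAGER_EC_SUBMIT_USER().getValue();
        String creator = JobLauncherConfiguration.FLINK_MANAGER_EC_SUBMIT_CREATOR().getValue();
        System.out.println("Manager EC is submitted as " + submitUser + "/" + creator);
    }
}
```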
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/core/FlinkLogIterator.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/core/FlinkLogIterator.scala
index 2fdb12b7b..d6fb5c661 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/core/FlinkLogIterator.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/core/FlinkLogIterator.scala
@@ -18,6 +18,7 @@ trait FlinkLogIterator extends Iterator[String] with Closeable {
val engineConnLogOperator: EngineConnLogOperator
def init(): Unit
def getLogPath: String
+ def getLogDirSuffix: String
def getLogs: util.ArrayList[String]
def getEndLine: Long
}
@@ -28,6 +29,7 @@ class SimpleFlinkJobLogIterator(override val requestPayload: LogRequestPayload,
private var logs: util.ArrayList[String] = _
private var index = 0
private var logPath: String = _
+ private var logDirSuffix: String = _
private var isClosed = true
private var endLine = 0
@@ -69,4 +71,8 @@ class SimpleFlinkJobLogIterator(override val requestPayload: LogRequestPayload,
override def getLogs: util.ArrayList[String] = logs
override def getEndLine: Long = endLine
+
+ def setLogDirSuffix(logDirSuffix: String) : Unit = this.logDirSuffix = logDirSuffix
+
+ override def getLogDirSuffix: String = logDirSuffix
}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/entity/LogRequestPayload.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/entity/LogRequestPayload.scala
index 29f90d325..20cb4d081 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/entity/LogRequestPayload.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/entity/LogRequestPayload.scala
@@ -14,6 +14,7 @@ class LogRequestPayload {
private var onlyKeywords: String = _
private var lastRows = 0
private var logType: String = _
+ private var logHistory: Boolean = false
def getPageSize: Int = pageSize
def setPageSize(pageSize: Int): Unit = this.pageSize = pageSize
@@ -32,4 +33,8 @@ class LogRequestPayload {
def getLogType: String = logType
def setLogType(logType: String): Unit = this.logType = logType
+
+ def isLogHistory: Boolean = logHistory
+
+ def setLogHistory(logHistory: Boolean): Unit = this.logHistory = logHistory
}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/exception/FlinkJobLaunchErrorException.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/exception/FlinkJobLaunchErrorException.scala
index d3dddbc4d..daec1f37a 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/exception/FlinkJobLaunchErrorException.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/exception/FlinkJobLaunchErrorException.scala
@@ -15,6 +15,7 @@
package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.errorcode.JobLaunchErrorCode
import org.apache.linkis.common.exception.ErrorException
/**
@@ -29,15 +30,29 @@ class FlinkJobLaunchErrorException(errorCode: Int, errorMsg: String, t: Throwabl
/**
* Exception in triggering savepoint
*/
-class FlinkSavePointException(errorCode: Int, errorMsg: String, t: Throwable)
+class FlinkSavePointException(errorCode: Int = JobLaunchErrorCode.JOB_EC_SAVEPOINT_ERROR, errorMsg: String, t: Throwable)
extends FlinkJobLaunchErrorException(errorCode, errorMsg, t)
/**
* Exception in fetching job state
*/
-class FlinkJobStateFetchException(errorCode: Int, errorMsg: String, t: Throwable)
+class FlinkJobStateFetchException(errorCode: Int = JobLaunchErrorCode.JOB_EC_STATUS_FETCH_ERROR, errorMsg: String, t: Throwable)
extends FlinkJobLaunchErrorException(errorCode, errorMsg, t)
class FlinkJobLogFetchException(errorCode: Int, errorMsg: String, t: Throwable)
extends FlinkJobLaunchErrorException(errorCode, errorMsg, t)
+class FlinkJobParamErrorException(errorMsg: String, t: Throwable = null)
+extends FlinkJobLaunchErrorException(JobLaunchErrorCode.JOB_PARAM_ERROR_CODE, errorMsg, t)
+
+class FlinkJobFlinkECErrorException(errorMsg: String, t: Throwable = null)
+extends FlinkJobLaunchErrorException(JobLaunchErrorCode.JOB_EC_ERROR_CODE, errorMsg, t)
+
+class FlinkJobKillECErrorException(errorMsg: String, t: Throwable = null)
+extends FlinkJobLaunchErrorException(JobLaunchErrorCode.JOB_EC_KILL_ERROR, errorMsg, t)
+
+class FlinkECHandshakeErrorException(errorMsg: String, t: Throwable = null)
+extends FlinkJobLaunchErrorException(JobLaunchErrorCode.JOB_EC_HANDSHAKE_ERROR, errorMsg, t)
+
+class FlinkECOperateErrorException(errorMsg: String, t: Throwable = null)
+extends FlinkJobLaunchErrorException(JobLaunchErrorCode.MANAGER_EC_OPERATE_ERROR, errorMsg, t)
\ No newline at end of file
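The new subclasses pre-bind their error codes from `JobLaunchErrorCode`, so Scala call sites only supply a message. A hedged Java-side sketch (from Java, Scala default arguments are not available, so the `Throwable` must be passed explicitly):

```java
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.FlinkJobLaunchErrorException;
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.FlinkJobParamErrorException;

public class LaunchErrorSketch {
    public static void main(String[] args) {
        try {
            // JOB_PARAM_ERROR_CODE is bound by the subclass; only the message varies
            throw new FlinkJobParamErrorException("applicationId must not be blank", null);
        } catch (FlinkJobLaunchErrorException e) {
            // One base type catches every launch-side failure
            System.err.println(e.getMessage());
        }
    }
}
```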
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/action/AbstractFlinkManagerAction.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/action/AbstractFlinkManagerAction.scala
new file mode 100644
index 000000000..c41afd3f6
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/action/AbstractFlinkManagerAction.scala
@@ -0,0 +1,92 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.action
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.enums.FlinkManagerActionType
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.FlinkManagerAction
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.constants.JobConstants
+import org.apache.linkis.computation.client.once.action.{ECMOperateAction, EngineConnOperateAction}
+import org.apache.linkis.governance.common.constant.ec.ECConstants
+import org.apache.linkis.governance.common.enums.OnceJobOperationBoundary
+import java.util
+
+import scala.collection.JavaConverters.mapAsScalaMapConverter
+
+abstract class AbstractFlinkManagerAction extends FlinkManagerAction {
+
+ override def build(): EngineConnOperateAction = {
+ val operateAction: EngineConnOperateAction = getOperationBoundry match {
+ case OnceJobOperationBoundary.ECM =>
+ new ECMOperateAction()
+ case OnceJobOperationBoundary.EC =>
+ new EngineConnOperateAction()
+ }
+
+ val operateNameKey = EngineConnOperateAction.OPERATOR_NAME_KEY
+
+ // inner params to ec
+ val params = getParams()
+ params.put(JobConstants.MSG_KEY, getMsg)
+ params.put(operateNameKey, getActionType.getName)
+ params.put(JobConstants.FLINK_OPERATION_BOUNDARY_KEY, getOperationBoundry.toString)
+ params.put(JobConstants.APPLICATION_ID_KEY, getApplicationId)
+ // outer params to manager
+ operateAction.addRequestPayload("parameters", params)
+ getPlayloads().asScala.foreach{kv => operateAction.addRequestPayload(kv._1, kv._2)}
+ operateAction.setUser(getExecuteUser)
+ operateAction
+ }
+
+ override def getOperationBoundry: OnceJobOperationBoundary = FlinkManagerActionType.getOperationBoundary(getActionType)
+}
+
+
+class FlinkStatusAction(applicationId: String, msg: String) extends AbstractFlinkManagerAction {
+ override def getApplicationId: String = applicationId
+
+ override def getMsg: String = msg
+
+ override def getActionType: FlinkManagerActionType = FlinkManagerActionType.STATUS
+
+}
+
+class FlinkKillAction(applicationId: String, msg: String) extends AbstractFlinkManagerAction {
+ override def getApplicationId: String = applicationId
+
+ override def getMsg: String = msg
+
+ override def getActionType: FlinkManagerActionType = FlinkManagerActionType.KILL
+}
+
+class FlinkSaveAction(applicationId: String, msg: String) extends AbstractFlinkManagerAction {
+ override def getApplicationId: String = applicationId
+
+ override def getMsg: String = msg
+
+ override def getActionType: FlinkManagerActionType = FlinkManagerActionType.SAVE
+
+ def setSavepointPath(path: String): Unit = getParams().put(JobConstants.SAVAPOINT_PATH_KEY, path)
+
+ def setMode(mode: String): Unit = getParams().put(JobConstants.MODE_KEY, mode)
+}
+
+class ListYarnAppAction(appName: String, user: String, msg: String, appTypeList: util.List[String]) extends AbstractFlinkManagerAction {
+
+ override def getApplicationId: String = null
+
+ override def getMsg: String = msg
+
+ override def getActionType: FlinkManagerActionType = FlinkManagerActionType.LIST
+
+ def addAppType(appType: String): Unit = {
+ val types = getParams().getOrDefault(ECConstants.YARN_APP_TYPE_LIST_KEY, new util.ArrayList[String]).asInstanceOf[util.List[String]]
+ getParams().putIfAbsent(ECConstants.YARN_APP_TYPE_LIST_KEY, types)
+ if (!types.contains(appType)) {
+ types.add(appType)
+ }
+ }
+
+ getParams().put(ECConstants.YARN_APP_NAME_KEY, appName)
+
+ getParams().put(JobConstants.MSG_KEY, msg)
+
+ getParams().put(ECConstants.YARN_APP_TYPE_LIST_KEY, appTypeList)
+}
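A hedged sketch of driving one of these actions from Java, assuming the `FlinkManagerAction` trait (not shown in this patch) supplies the mutable params map that `build()` fills; the applicationId is a placeholder:

```java
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.action.FlinkStatusAction;
import org.apache.linkis.computation.client.once.action.EngineConnOperateAction;

public class ManagerActionSketch {
    public static void main(String[] args) {
        // STATUS routes to the EC boundary, so build() yields a plain
        // EngineConnOperateAction whose "parameters" payload carries the
        // msg, operator name, boundary, and applicationId
        FlinkStatusAction action = new FlinkStatusAction("application_1671000000000_0001", "status inspect");
        EngineConnOperateAction operateAction = action.build();
        System.out.println(operateAction.getRequestPayload());
    }
}
```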
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobClient.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/AbstractJobClient.scala
similarity index 50%
rename from streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobClient.scala
rename to streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/AbstractJobClient.scala
index 951145b94..e08fc66bc 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobClient.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/AbstractJobClient.scala
@@ -13,39 +13,33 @@
* limitations under the License.
*/
-package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.client
-import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobClient
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{JobClient, JobInfo}
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobStateInfo
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration
-import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.core.{FlinkLogIterator, SimpleFlinkJobLogIterator}
-import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.entity.LogRequestPayload
-import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.{FlinkJobLaunchErrorException, FlinkJobStateFetchException, FlinkSavePointException}
-import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager.FlinkJobLaunchManager
-import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.operator.{FlinkTriggerSavepointOperator, FlinkYarnLogOperator}
-import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.{Checkpoint, Savepoint}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.FlinkSavePointException
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.jobInfo.EngineConnJobInfo
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.operator.FlinkTriggerSavepointOperator
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.FlinkSavepoint
+import org.apache.linkis.common.ServiceInstance
import org.apache.linkis.common.utils.{Logging, Utils}
+import org.apache.linkis.computation.client.once.simple.{SimpleOnceJob, SubmittableSimpleOnceJob}
import org.apache.linkis.computation.client.once.OnceJob
-import org.apache.linkis.computation.client.once.simple.SimpleOnceJob
-import org.apache.linkis.computation.client.operator.impl.EngineConnLogOperator
-import java.net.URI
-
-class FlinkJobClient(onceJob: OnceJob, var jobInfo: FlinkJobInfo, stateManager: JobStateManager)
- extends JobClient[FlinkJobInfo] with Logging{
+/**
+ * @author jefftlin
+ */
+abstract class AbstractJobClient(onceJob: OnceJob, jobInfo: JobInfo, stateManager: JobStateManager)
+ extends JobClient[JobInfo] with Logging{
/**
- * Log operator
+ * Refresh job info and return
+ *
+ * @return
*/
- private var logOperatorMap = Map(
- "client" -> EngineConnLogOperator.OPERATOR_NAME,
- "yarn" -> FlinkYarnLogOperator.OPERATOR_NAME
- )
-
- override def getJobInfo: FlinkJobInfo = {
- getJobInfo(false)
- }
+ override def getJobInfo: JobInfo = getJobInfo(false)
/**
* Refresh job info and return
@@ -53,18 +47,39 @@ class FlinkJobClient(onceJob: OnceJob, var jobInfo: FlinkJobInfo, stateManager:
* @param refresh refresh
* @return
*/
- override def getJobInfo(refresh: Boolean): FlinkJobInfo = {
+ override def getJobInfo(refresh: Boolean): JobInfo = {
onceJob match {
case simpleOnceJob: SimpleOnceJob =>
- simpleOnceJob.getStatus
- jobInfo.setStatus(if (refresh) onceJob.getNodeInfo
- .getOrDefault("nodeStatus", simpleOnceJob.getStatus).asInstanceOf[String] else simpleOnceJob.getStatus)
+ Utils.tryCatch {
+ jobInfo.setStatus(if (refresh && null != onceJob.getNodeInfo) onceJob.getNodeInfo
+ .getOrDefault("nodeStatus", simpleOnceJob.getStatus).asInstanceOf[String] else simpleOnceJob.getStatus)
+ } {
+ case e: Exception =>
+ val ec: ServiceInstance = simpleOnceJob.getEcServiceInstance
+ if (null != ec) {
+ if (e.getMessage.contains(s"Instance does not exist ServiceInstance(linkis-cg-engineconn, ${ec.getInstance}")) {
+ logger.warn(s"EC instance : ${ec.toString()} not exist, will set status to Failed.")
+ jobInfo.setStatus("Failed")
+ }
+ } else {
+ logger.error(s"EC instance of job : ${jobInfo.getId} is null, no need to get job status.")
+ throw e
+ }
+ }
}
jobInfo
}
+ /**
+ * Stop directly
+ * SparkRestJobClient need to override
+ */
+ override def stop(): Unit = stop(false)
+
/**
* Stop the job connected remote
+ * Used in FlinkRestJobClient & EngineConnJobClient
+ * Not support SparkRestJobClient
*
* @param snapshot if do snapshot to save the job state
*/
@@ -86,63 +101,39 @@ class FlinkJobClient(onceJob: OnceJob, var jobInfo: FlinkJobInfo, stateManager:
}
/**
- * Stop directly
- */
- override def stop(): Unit = stop(false)
-/**
- * Fetch logs
- * @param requestPayload request payload
+ * Trigger save point operation
+ * Used in FlinkRestJobClient & EngineConnJobClient
+ * Not support SparkRestJobClient
+ *
* @return
*/
- def fetchLogs(requestPayload: LogRequestPayload): FlinkLogIterator = {
- logOperatorMap.get(requestPayload.getLogType) match {
- case Some(operator) =>
- onceJob.getOperator(operator) match {
- case engineConnLogOperator: EngineConnLogOperator =>
- engineConnLogOperator match {
- case yarnLogOperator: FlinkYarnLogOperator => yarnLogOperator.setApplicationId(jobInfo.getApplicationId)
- case _ =>
- }
- engineConnLogOperator.setECMServiceInstance(jobInfo.getECMInstance)
- engineConnLogOperator.setEngineConnType(FlinkJobLaunchManager.FLINK_ENGINE_CONN_TYPE)
- val logIterator = new SimpleFlinkJobLogIterator(requestPayload, engineConnLogOperator)
- logIterator.init()
- jobInfo match {
- case jobInfo: FlinkJobInfo => jobInfo.setLogPath(logIterator.getLogPath)
- case _ =>
- }
- logIterator
- }
- case None =>
- throw new FlinkJobStateFetchException(-1, s"Unrecognized log type: ${requestPayload.getLogType}", null)
- }
-
-
+ def triggerSavepoint(): FlinkSavepoint = {
+ getJobInfo(true)
+ val savepointURI = this.stateManager.getJobStateDir(classOf[FlinkSavepoint], jobInfo.getName)
+ triggerSavepoint(savepointURI.toString, JobLauncherConfiguration.FLINK_TRIGGER_SAVEPOINT_MODE.getValue)
}
/**
- * Get check points
- * @return
- */
- def getCheckpoints: Array[Checkpoint] = throw new FlinkJobStateFetchException(30401, "Not support method", null)
-
-
- /**
- * Trigger save point operation
+ * Trigger save point operation with savePointDir and mode
+ * Used in FlinkRestJobClient & EngineConnJobClient
+ * Not support SparkRestJobClient
+ *
* @param savePointDir savepoint directory
- * @param mode mode
+ * @param mode mode
*/
- def triggerSavepoint(savePointDir: String, mode: String): Savepoint = {
+ def triggerSavepoint(savePointDir: String, mode: String): FlinkSavepoint = {
Utils.tryCatch{
onceJob.getOperator(FlinkTriggerSavepointOperator.OPERATOR_NAME) match{
case savepointOperator: FlinkTriggerSavepointOperator => {
- // TODO Get scheme information from job info
+ jobInfo match {
+ case engineConnJobInfo: EngineConnJobInfo =>
+ savepointOperator.setApplicationId(engineConnJobInfo.getApplicationId)
+ }
savepointOperator.setSavepointDir(savePointDir)
savepointOperator.setMode(mode)
Option(savepointOperator()) match {
- case Some(savepoint: Savepoint) =>
+ case Some(savepoint: FlinkSavepoint) =>
savepoint
- // TODO store into job Info
case _ => throw new FlinkSavePointException(-1, "The response savepoint info is empty", null)
}
}
@@ -151,15 +142,8 @@ class FlinkJobClient(onceJob: OnceJob, var jobInfo: FlinkJobInfo, stateManager:
case se: FlinkSavePointException =>
throw se
case e: Exception =>
- // TODO defined the code for savepoint exception
- throw new FlinkSavePointException(-1, "Fail to trigger savepoint operator", e)
+ throw new FlinkSavePointException(30501, "Fail to trigger savepoint operator", e)
}
}
- def triggerSavepoint(): Savepoint = {
- val savepointURI: URI = this.stateManager.getJobStateDir(classOf[Savepoint], jobInfo.getName)
- triggerSavepoint(savepointURI.toString, JobLauncherConfiguration.FLINK_TRIGGER_SAVEPOINT_MODE.getValue)
- }
-
-
}
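A hedged Java sketch of the stop-with-snapshot contract above, assuming `stop(snapshot)` keeps the `JobStateInfo` return type it had in the original `FlinkJobClient` and that `JobStateInfo` exposes getters mirroring the setters used in this patch:

```java
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobStateInfo;
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.client.AbstractJobClient;

public class StopWithSnapshotSketch {
    // client is assumed to be an already-constructed concrete job client
    static void stopJob(AbstractJobClient client) {
        // stop(true) triggers a savepoint first and reports where the state landed
        JobStateInfo state = client.stop(true);
        if (state != null) {
            System.out.println("Savepoint at " + state.getLocation() + ", ts " + state.getTimestamp());
        }
    }
}
```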
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/AbstractRestJobClient.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/AbstractRestJobClient.scala
new file mode 100644
index 000000000..79998e36c
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/AbstractRestJobClient.scala
@@ -0,0 +1,27 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.client
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager
+import org.apache.http.impl.client.CloseableHttpClient
+import org.apache.linkis.computation.client.once.OnceJob
+
+/**
+ * @author jefftlin
+ */
+abstract class AbstractRestJobClient(onceJob: OnceJob, jobInfo: JobInfo, stateManager: JobStateManager) extends AbstractJobClient(onceJob, jobInfo, stateManager) {
+
+ /**
+ * The http client in onceJob
+ */
+ protected var httpClient: CloseableHttpClient = _
+
+ def setHttpClient(httpClient: CloseableHttpClient): Unit = {
+ this.httpClient = httpClient
+ }
+
+ /**
+ * Get the http client
+ * @return
+ */
+ def getHttpClient(): CloseableHttpClient = httpClient
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/EngineConnJobClient.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/EngineConnJobClient.scala
new file mode 100644
index 000000000..0dcfd851c
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/EngineConnJobClient.scala
@@ -0,0 +1,89 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.client
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.core.{FlinkLogIterator, SimpleFlinkJobLogIterator}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.entity.LogRequestPayload
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.FlinkJobStateFetchException
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.jobInfo.EngineConnJobInfo
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager.FlinkJobLaunchManager
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.operator.{FlinkClientLogOperator, FlinkYarnLogOperator}
+import org.apache.commons.lang3.StringUtils
+import org.apache.linkis.common.utils.Utils
+import org.apache.linkis.computation.client.once.action.ECResourceInfoAction
+import org.apache.linkis.computation.client.once.result.ECResourceInfoResult
+import org.apache.linkis.computation.client.once.OnceJob
+import org.apache.linkis.computation.client.operator.impl.EngineConnLogOperator
+import org.apache.linkis.httpclient.dws.DWSHttpClient
+
+import java.util
+
+/**
+ * @author jefftlin
+ */
+class EngineConnJobClient(onceJob: OnceJob, jobInfo: JobInfo, stateManager: JobStateManager)
+ extends LinkisEngineConnJobClient(onceJob, jobInfo, stateManager) {
+
+ /**
+ * Log operator
+ */
+ private val logOperatorMap = Map(
+ "client" -> FlinkClientLogOperator.OPERATOR_NAME,
+ "yarn" -> FlinkYarnLogOperator.OPERATOR_NAME
+ )
+
+ /**
+ * Fetch logs
+ * @param requestPayload request payload
+ * @return
+ */
+ def fetchLogs(requestPayload: LogRequestPayload): FlinkLogIterator = {
+ logOperatorMap.get(requestPayload.getLogType) match {
+ case Some(operator) =>
+ onceJob.getOperator(operator) match {
+ case engineConnLogOperator: EngineConnLogOperator =>
+ val logIterator = new SimpleFlinkJobLogIterator(requestPayload, engineConnLogOperator)
+ engineConnLogOperator match {
+ case clientLogOperator: FlinkClientLogOperator =>
+ var logDirSuffix = this.jobInfo.asInstanceOf[EngineConnJobInfo].getLogDirSuffix
+ if (StringUtils.isBlank(logDirSuffix) && requestPayload.isLogHistory){
+ // If want to fetch the history log, must get the log directory suffix first
+ getLinkisClient match {
+ case client: DWSHttpClient =>
+ Option(Utils.tryCatch{
+ client.execute(ECResourceInfoAction.newBuilder().setUser(jobInfo.getUser)
+ .setTicketid(clientLogOperator.getTicketId).build()).asInstanceOf[ECResourceInfoResult]
+ }{
+ case e: Exception =>
+ warn("Fail to query the engine conn resource info from linkis", e)
+ null
+ }) match {
+ case Some(result) => logDirSuffix = Utils.tryQuietly{Option(result.getData).getOrElse(new util.HashMap[String, AnyRef]()).getOrDefault("ecResourceInfoRecord", new util.HashMap[String, AnyRef]).asInstanceOf[util.Map[String, AnyRef]]
+ .getOrDefault("logDirSuffix", "").asInstanceOf[String]}
+ case _ =>
+ }
+ }
+ }
+ clientLogOperator.setLogDirSuffix(logDirSuffix)
+ logIterator.setLogDirSuffix(logDirSuffix)
+ case _ =>
+ }
+ engineConnLogOperator match {
+ case yarnLogOperator: FlinkYarnLogOperator => yarnLogOperator.setApplicationId(jobInfo.asInstanceOf[EngineConnJobInfo].getApplicationId)
+ case _ =>
+ }
+ engineConnLogOperator.setECMServiceInstance(jobInfo.asInstanceOf[EngineConnJobInfo].getECMInstance)
+ engineConnLogOperator.setEngineConnType(FlinkJobLaunchManager.FLINK_ENGINE_CONN_TYPE)
+ logIterator.init()
+ jobInfo match {
+ case jobInfo: EngineConnJobInfo => jobInfo.setLogPath(logIterator.getLogPath)
+ case _ =>
+ }
+ logIterator
+ }
+ case None =>
+ throw new FlinkJobStateFetchException(-1, s"Unrecognized log type: ${requestPayload.getLogType}", null)
+ }
+ }
+
+}
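A hedged sketch of fetching logs through the client above; the payload values are placeholders. `logType` selects the operator (`client` or `yarn`), and `logHistory=true` additionally triggers the EC log-dir-suffix lookup shown above:

```java
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.core.FlinkLogIterator;
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.entity.LogRequestPayload;
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.client.EngineConnJobClient;

import java.io.IOException;

public class FetchLogsSketch {
    // client is assumed to be an already-constructed EngineConnJobClient
    static void printYarnLogs(EngineConnJobClient client) throws IOException {
        LogRequestPayload payload = new LogRequestPayload();
        payload.setLogType("yarn");   // routes to FlinkYarnLogOperator
        payload.setLogHistory(false); // true would trigger the log-dir-suffix lookup
        payload.setPageSize(100);
        FlinkLogIterator logs = client.fetchLogs(payload);
        try {
            while (logs.hasNext()) {
                System.out.println(logs.next());
            }
        } finally {
            logs.close();
        }
    }
}
```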
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/FlinkRestJobClient.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/FlinkRestJobClient.scala
new file mode 100644
index 000000000..4a69743df
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/FlinkRestJobClient.scala
@@ -0,0 +1,47 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.client
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobStateInfo
+import org.apache.linkis.computation.client.once.OnceJob
+
+/**
+ * @author jefftlin
+ */
+class FlinkRestJobClient (onceJob: OnceJob, jobInfo: JobInfo, stateManager: JobStateManager) extends AbstractRestJobClient(onceJob, jobInfo, stateManager){
+
+ def getJobId(): String = null
+
+ /**
+ * Stop the job connected remote
+ *
+ * @param snapshot if do snapshot to save the job state
+ */
+ override def stop(snapshot: Boolean): JobStateInfo = {
+ var stateInfo: JobStateInfo = null
+ if (snapshot){
+ // Begin to call the savepoint operator
+ info(s"Trigger Savepoint operator for job [${jobInfo.getId}] before pausing job.")
+ Option(triggerSavepoint()) match {
+ case Some(savepoint) =>
+ stateInfo = new JobStateInfo
+ stateInfo.setLocation(savepoint.getLocation.toString)
+ stateInfo.setTimestamp(savepoint.getTimestamp)
+ case _ =>
+ }
+ }
+ onceJob.kill()
+ stateInfo
+ }
+
+ /**
+ * Snapshot
+ *
+ * @return
+ */
+ def snapshot(): Boolean = {
+ triggerSavepoint()
+ true
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/LinkisEngineConnJobClient.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/LinkisEngineConnJobClient.scala
new file mode 100644
index 000000000..b52328fbc
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/LinkisEngineConnJobClient.scala
@@ -0,0 +1,25 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.client
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager
+import org.apache.linkis.computation.client.once.OnceJob
+import org.apache.linkis.httpclient.dws.DWSHttpClient
+
+/**
+ * @author jefftlin
+ */
+abstract class LinkisEngineConnJobClient(onceJob: OnceJob, jobInfo: JobInfo, stateManager: JobStateManager) extends AbstractJobClient(onceJob, jobInfo, stateManager) {
+
+ /**
+ * The linkis client in onceJob
+ */
+ protected var linkisClient: DWSHttpClient = _
+
+ def setLinkisClient(linkisClient: DWSHttpClient): Unit = {
+ this.linkisClient = linkisClient
+ }
+
+ def getLinkisClient(): DWSHttpClient = {
+ this.linkisClient
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/LinkisFlinkManagerClient.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/LinkisFlinkManagerClient.scala
new file mode 100644
index 000000000..44bba94e4
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/LinkisFlinkManagerClient.scala
@@ -0,0 +1,372 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.client
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.enums.FlinkManagerActionType
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{FlinkManagerAction, FlinkManagerClient}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.{FlinkJobFlinkECErrorException, FlinkJobParamErrorException}
+import org.apache.commons.lang3.{SerializationUtils, StringUtils}
+import org.apache.linkis.common.ServiceInstance
+import org.apache.linkis.common.utils.{JsonUtils, Logging, Utils}
+import org.apache.linkis.computation.client.LinkisJobBuilder
+import org.apache.linkis.computation.client.once.LinkisManagerClient
+import org.apache.linkis.computation.client.once.action.{AskEngineConnAction, ECMOperateAction, EngineConnOperateAction, GetEngineConnAction}
+import org.apache.linkis.computation.client.once.result.EngineConnOperateResult
+import org.apache.linkis.computation.client.once.simple.SimpleOnceJobBuilder
+import org.apache.linkis.computation.client.once.simple.SimpleOnceJobBuilder.getLinkisManagerClient
+import org.apache.linkis.governance.common.constant.ec.ECConstants
+import org.apache.linkis.governance.common.enums.OnceJobOperationBoundary
+import org.apache.linkis.manager.common.constant.AMConstant
+import org.apache.linkis.manager.common.entity.enumeration.NodeStatus
+import org.apache.linkis.manager.label.conf.LabelCommonConfig
+import org.apache.linkis.manager.label.constant.LabelKeyConstant
+import org.apache.linkis.manager.label.entity.TenantLabel
+import org.apache.linkis.manager.label.entity.engine._
+
+import java.util
+import java.util.concurrent.TimeUnit
+import java.util.concurrent.atomic.AtomicBoolean
+
+class LinkisFlinkManagerClient extends FlinkManagerClient with Logging {
+
+ private var ecInstance: ServiceInstance = _
+ private var ecmInstance: ServiceInstance = _
+ private val healthy = new AtomicBoolean(true)
+
+ def init(): Unit = {
+ refreshManagerEC()
+ }
+
+ private def getOrCreateLinkisManagerECAndECM(): (ServiceInstance, ServiceInstance) = {
+ val initLabels = new util.HashMap[String, String]
+ val submitUser = JobLauncherConfiguration.FLINK_MANAGER_EC_SUBMIT_USER.getValue
+ val submitCreator = JobLauncherConfiguration.FLINK_MANAGER_EC_SUBMIT_CREATOR.getValue
+ val engineTypeLabel = new EngineTypeLabel
+ engineTypeLabel.setEngineType(EngineType.FLINK.toString)
+ engineTypeLabel.setVersion(LabelCommonConfig.FLINK_ENGINE_VERSION.getValue)
+ val codeTypeLabel = new CodeLanguageLabel
+ codeTypeLabel.setCodeType(RunType.JSON.toString)
+ val userCreatorLabel = new UserCreatorLabel
+ userCreatorLabel.setUser(submitUser)
+ userCreatorLabel.setCreator(submitCreator)
+ if (StringUtils.isNotBlank(JobLauncherConfiguration.FLINK_MANAGER_EC_TENANT.getValue)) {
+ val tenantLabel = new TenantLabel
+ tenantLabel.setTenant(JobLauncherConfiguration.FLINK_MANAGER_EC_TENANT.getValue)
+ initLabels.put(tenantLabel.getLabelKey, tenantLabel.getStringValue)
+ }
+ val managerLabel = new ManagerLabel
+ managerLabel.setManager(EngineType.FLINK.toString)
+ initLabels.put(engineTypeLabel.getLabelKey, engineTypeLabel.getStringValue)
+ initLabels.put(codeTypeLabel.getLabelKey, codeTypeLabel.getStringValue)
+ initLabels.put(userCreatorLabel.getLabelKey, userCreatorLabel.getStringValue)
+ initLabels.put(managerLabel.getLabelKey, managerLabel.getStringValue)
+ if (StringUtils.isNotBlank(JobLauncherConfiguration.FLINK_MANAGER_EC_TENANT.getValue)) {
+ initLabels.put(LabelKeyConstant.TENANT_KEY, JobLauncherConfiguration.FLINK_MANAGER_EC_TENANT.getValue)
+ }
+ val initProperties = new util.HashMap[String, String]()
+
+ initProperties.put("flink.app.savePointPath", "./tmp")
+ initProperties.put("flink.app.name", "FlinkManagerEC")
+ initProperties.put(JobLauncherConfiguration.FLINK_MANAGER_EC_KEY.getValue, true.toString)
+ initProperties.put(JobLauncherConfiguration.LINKIS_EC_EXPIRE_TIME_KEY.getValue, JobLauncherConfiguration.FLINKK_MANAGER_EXIT_TIME.getHotValue().toString)
+ initProperties.put(JobLauncherConfiguration.LINKIS_EC_SUPPORT_PARALLEM, true.toString)
+
+ var askEngineConnAction = AskEngineConnAction
+ .newBuilder()
+ .setCreateService(getClass.getSimpleName)
+ .setLabels(initLabels)
+ .setProperties(initProperties)
+ .setUser(submitUser)
+ .setMaxSubmitTime(30000)
+ .setDescription("Ask a flink manager ec.")
+ .build()
+ var end = false
+ val linkisManagerClient = getLinkisManagerClient
+ var ecServiceInstance: ServiceInstance = null
+ var ecmInstance: ServiceInstance = null
+ logger.info(s"ask flink manager ec askEngineConnAction: ${askEngineConnAction.getRequestPayload}")
+ var nodeInfo: util.Map[String, Any] = null
+ Utils.tryCatch {
+ nodeInfo = linkisManagerClient.askEngineConn(askEngineConnAction).getNodeInfo
+ } {
+ case e: Exception =>
+ logger.error("call askEngineConn to get manager ec failed. ", e)
+ }
+ val tmpLabels = SerializationUtils.clone(initLabels).asInstanceOf[util.Map[String, String]]
+ val tmpProps = SerializationUtils.clone(initProperties).asInstanceOf[util.Map[String, String]]
+// var lastAsyncId: String = null
+// var lastManagerInstance: ServiceInstance = null
+ var retryCount = 0
+ val MAX_RETRY_COUNT = 10
+
+ while (!end) {
+ retryCount = retryCount + 1
+ if (null != nodeInfo) {
+ nodeInfo.get(AMConstant.EC_ASYNC_START_RESULT_KEY) match {
+ case AMConstant.EC_ASYNC_START_RESULT_SUCCESS =>
+ end = true
+ ecServiceInstance = getServiceInstance(nodeInfo)
+ ecmInstance = getECMInstance(nodeInfo)
+ val ticketId = getTicketId(nodeInfo)
+ val status = getAs[String](nodeInfo, ECConstants.NODE_STATUS_KEY)
+ if (!NodeStatus.Unlock.toString.equalsIgnoreCase(status)) {
+ logger.info(s"Get manager ec with status : ${status}")
+ waitUntilRunOrFailed(ecServiceInstance, ticketId, submitUser)
+ }
+ case AMConstant.EC_ASYNC_START_RESULT_FAIL =>
+            end = retryCount >= MAX_RETRY_COUNT
+ val failMsg = nodeInfo.getOrDefault(AMConstant.EC_ASYNC_START_FAIL_MSG_KEY, "no reason")
+ logger.error(s"start flink manager ec failed because: ${failMsg}")
+ logger.warn(s"askEngineConnAction: ${askEngineConnAction.getRequestPayload}")
+ if (tmpProps.containsKey(AMConstant.EC_ASYNC_START_ID_KEY)) {
+ tmpProps.remove(AMConstant.EC_ASYNC_START_ID_KEY)
+ }
+          val canRetry = nodeInfo.getOrDefault(AMConstant.EC_ASYNC_START_FAIL_RETRY_KEY, "false").toString.toBoolean
+          if (canRetry) {
+ logger.warn("start manager ec failed but can retry.")
+ } else {
+ throw new FlinkJobFlinkECErrorException(s"Start manager ec failed. Because : ${failMsg}")
+ }
+ case null =>
+ end = true
+ logger.error(s"start flink manager ec failed because: null ec result status")
+ logger.warn(s"askEngineConnAction: ${askEngineConnAction.getRequestPayload}")
+ throw new FlinkJobFlinkECErrorException(s"Start manager ec failed. Null ec result status")
+ }
+ }
+      if (!end) {
+        Thread.sleep(1000)
+        askEngineConnAction = AskEngineConnAction
+          .newBuilder()
+          .setCreateService(getClass.getSimpleName)
+          .setLabels(tmpLabels)
+          .setProperties(tmpProps)
+          .setUser(submitUser)
+          .setMaxSubmitTime(30000)
+          .setDescription("Ask a flink manager ec.")
+          .build()
+        Utils.tryAndWarn {
+          nodeInfo = linkisManagerClient.askEngineConn(askEngineConnAction).getNodeInfo
+        }
+        logger.debug(JsonUtils.jackson.writeValueAsString(nodeInfo))
+      }
+ }
+ if (null != ecServiceInstance) {
+ logger.info("ecInstance : " + ecServiceInstance.toString())
+ } else {
+ logger.warn("Got null ecInstance.")
+ }
+    if (null != ecmInstance) {
+      logger.info("ecmInstance : " + ecmInstance.toString())
+    } else {
+      logger.warn("Got null ecmInstance.")
+ }
+ (ecServiceInstance, ecmInstance)
+ }
+
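+  /**
+   * Poll the EC status about once per second (at most MAX_RETRY times) until it is
+   * unlocked or reaches a completed status.
+   */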
+ private def waitUntilRunOrFailed(serviceInstance: ServiceInstance, ticketId: String, user: String): Unit = {
+ var end = false
+ val MAX_RETRY = 10
+ var count = 0
+ while (!end) {
+ count = count + 1
+ Utils.tryAndWarn {
+ val result = LinkisManagerClient.apply(LinkisJobBuilder.getDefaultUJESClient).getEngineConn(GetEngineConnAction
+ .newBuilder()
+ .setApplicationName(serviceInstance.getApplicationName)
+ .setInstance(serviceInstance.getInstance)
+ .setTicketId(ticketId)
+ .setUser(user)
+ .build())
+ val nodeInfo = result.getNodeInfo
+ val status = getAs[String](nodeInfo, ECConstants.NODE_STATUS_KEY)
+ logger.info(s"ec : ${serviceInstance.toString()} status : ${status}")
+        if (NodeStatus.Unlock.toString.equalsIgnoreCase(status) ||
+          NodeStatus.isCompleted(NodeStatus.toNodeStatus(status))) {
+          end = true
+        }
+ }
+ if (count > MAX_RETRY) {
+ end = true
+ }
+ Thread.sleep(1000)
+ }
+ }
+
+ override def setFlinkManagerEngineConnInstance(ecInstance: ServiceInstance): Unit = this.ecInstance = ecInstance
+
+ override def getFlinkManagerEngineConnInstance(): ServiceInstance = {
+ if (null == ecInstance) {
+ refreshManagerEC()
+ }
+ ecInstance
+ }
+
+ override def setFlinkManagerECMInstance(ecmInstance: ServiceInstance): Unit = this.ecmInstance = ecmInstance
+
+ override def getFlinkManagerECMInstance(): ServiceInstance = this.ecmInstance
+
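+  /**
+   * Route a FlinkManagerAction to the manager EC: actions bounded to the ECM must
+   * build an ECMOperateAction, actions bounded to the EC an EngineConnOperateAction.
+   */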
+ override def executeAction(action: FlinkManagerAction): Any = {
+ action.setExeuteUser(JobLauncherConfiguration.FLINK_MANAGER_EC_SUBMIT_USER.getValue)
+ action.getActionType match {
+ case validType if FlinkManagerActionType.values().contains(validType) =>
+ val ecInstance = getFlinkManagerEngineConnInstance()
+ action.getPlayloads.put(ECConstants.EC_SERVICE_INSTANCE_KEY, ecInstance)
+ action.getPlayloads().put(ECConstants.ECM_SERVICE_INSTANCE_KEY, getFlinkManagerECMInstance())
+ action.setECInstance(getFlinkManagerEngineConnInstance())
+ val builtAction = action.build()
+ action.getOperationBoundry match {
+ case OnceJobOperationBoundary.ECM =>
+ if (builtAction.isInstanceOf[ECMOperateAction]) {
+ return doExecution(builtAction.asInstanceOf[ECMOperateAction])
+ } else {
+            throw new FlinkJobParamErrorException("FlinkManagerAction built an invalid action for operation boundary ECM, expected ECMOperateAction")
+ }
+ case OnceJobOperationBoundary.EC =>
+ if (builtAction.isInstanceOf[EngineConnOperateAction]) {
+ return doExecution(builtAction.asInstanceOf[EngineConnOperateAction])
+ } else {
+            throw new FlinkJobParamErrorException("FlinkManagerAction built an invalid action for operation boundary EC, expected EngineConnOperateAction")
+ }
+ }
+ case _ =>
+ throw new FlinkJobParamErrorException(s"Unsupported FlinkManagerAction : ${action}")
+ }
+ }
+
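+  /**
+   * Execute an EC operation. Requests are rejected while the client is marked
+   * unhealthy; if the manager EC cannot be found any more, it is refreshed once
+   * and the operation is replayed.
+   */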
+ def doExecution(operationAction: EngineConnOperateAction): EngineConnOperateResult = {
+ val operation = JsonUtils.jackson.writeValueAsString(operationAction)
+ if (!healthy.get()) {
+ logger.warn(s"FlinkManager is not healthy, will skip the operation : ${operation}.")
+      throw new FlinkJobFlinkECErrorException(s"FlinkManager is not healthy, skip the operation : ${operation} (manager EC is abnormal, please try again later).")
+ }
+ val managerEcInstance = getFlinkManagerEngineConnInstance()
+ val notFoundMsg = s"Ec : ${managerEcInstance.toString()} not found"
+ Utils.tryCatch {
+ var ecOperateResult = SimpleOnceJobBuilder.getLinkisManagerClient.executeEngineConnOperation(operationAction)
+ if (ecOperateResult.getIsError()) {
+ if (null != ecOperateResult.getErrorMsg() && ecOperateResult.getErrorMsg().contains(notFoundMsg)) {
+ logger.warn(notFoundMsg)
+ Utils.tryAndError(refreshManagerEC())
+ ecOperateResult = SimpleOnceJobBuilder.getLinkisManagerClient.executeEngineConnOperation(operationAction)
+ return ecOperateResult
+ } else {
+ logger.error(s"There are errors, but the errorMsg is null. rs : ${JsonUtils.jackson.writeValueAsString(ecOperateResult)}")
+ return ecOperateResult
+ }
+ } else {
+ return ecOperateResult
+ }
+ } {
+ case e: Exception =>
+ logger.error(s"executeEngineConnOperation failed with action : ${operationAction}", e)
+ if (null == managerEcInstance || e.getMessage.contains(notFoundMsg)) {
+ logger.error(e.getMessage)
+ Utils.tryAndError(refreshManagerEC())
+ return SimpleOnceJobBuilder.getLinkisManagerClient.executeEngineConnOperation(operationAction)
+ }
+ throw e
+ }
+
+ }
+
+ private def getServiceInstance(nodeInfo: util.Map[String, Any]): ServiceInstance =
+ nodeInfo.get(ECConstants.EC_SERVICE_INSTANCE_KEY) match {
+ case serviceInstance: util.Map[String, Any] =>
+ ServiceInstance(
+ getAs(serviceInstance, "applicationName"),
+ getAs(serviceInstance, "instance")
+ )
+ case _ =>
+ null
+ }
+
+ private def getECMInstance(nodeInfo: util.Map[String, Any]): ServiceInstance =
+ nodeInfo.get(ECConstants.ECM_SERVICE_INSTANCE_KEY) match {
+ case serviceInstance: util.Map[String, Any] =>
+ ServiceInstance(
+ getAs(serviceInstance, "applicationName"),
+ getAs(serviceInstance, "instance")
+ )
+ case _ =>
+ null
+ }
+
+ private def getManagerInstance(nodeInfo: util.Map[String, Any]): ServiceInstance =
+ nodeInfo.getOrDefault(ECConstants.MANAGER_SERVICE_INSTANCE_KEY, null) match {
+ case serviceInstance: ServiceInstance =>
+ serviceInstance
+ case _ =>
+ null
+ }
+
+ private def getAs[T](map: util.Map[String, Any], key: String): T =
+ map.getOrDefault(key, null).asInstanceOf[T]
+
+ def getTicketId(nodeInfo: util.Map[String, Any]): String = getAs(nodeInfo, "ticketId")
+
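+  /**
+   * Re-resolve the manager EC/ECM under a global lock. A failed refresh marks the
+   * client unhealthy and is attempted once more before the exception propagates.
+   */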
+ override def refreshManagerEC(): Unit = {
+ LinkisFlinkManagerClient.ASK_EC_LOCK.synchronized {
+ logger.info("Start to refresh manager ec.")
+
+ def refreshEC(): Unit = {
+ val rs = getOrCreateLinkisManagerECAndECM()
+ ecInstance = rs._1
+ ecmInstance = rs._2
+ healthy.set(true)
+ }
+
+ Utils.tryCatch(refreshEC()) {
+ case e: Exception =>
+ logger.error("Refresh manager ec failed. Will try once.", e)
+ healthy.set(false)
+ Utils.tryCatch(refreshEC()) {
+ case e1: Exception =>
+ logger.error("Refresh manager ec again failed. Will throw the exception.", e1)
+ healthy.set(false)
+ throw e1
+ }
+ }
+ }
+ }
+}
+
+object LinkisFlinkManagerClient extends Logging {
+
+ private val CLIENT_LOCK = new Object()
+
+ private val ASK_EC_LOCK = new Object()
+
+  @volatile private var client: LinkisFlinkManagerClient = _
+
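+  /** Lazily create the singleton client with double-checked locking on CLIENT_LOCK. */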
+ def getInstance(): LinkisFlinkManagerClient = {
+ if (null == client) {
+ CLIENT_LOCK.synchronized {
+ if (null == client) {
+ client = new LinkisFlinkManagerClient
+ }
+ }
+ }
+ client
+ }
+
+
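+  /**
+   * Schedule the periodic manager-EC refresh with a fixed delay (configurable via
+   * FLINK_MANAGER_EC_REFRESH_INTERVAL) unless the feature is disabled.
+   */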
+ def initScheduledTask(): Unit = {
+ if (!JobLauncherConfiguration.ENABLE_FLINK_MANAGER_EC_ENABLE.getValue) {
+ logger.info("Flink manager ec refresh task was disabled. Will skip the scheduled refreshing task.")
+ return
+ }
+ Utils.defaultScheduler.scheduleWithFixedDelay(new Runnable {
+ override def run(): Unit = {
+ logger.info("Start to refresh manager EC.")
+ Utils.tryAndError(getInstance().refreshManagerEC())
+ }
+ },
+ 5000,
+ JobLauncherConfiguration.FLINK_MANAGER_EC_REFRESH_INTERVAL.getValue,
+ TimeUnit.MILLISECONDS)
+ logger.info("Manager EC refreshing task started.")
+ }
+
+}
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/LinkisFlinkManagerJobClient.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/LinkisFlinkManagerJobClient.scala
new file mode 100644
index 000000000..7c2256c9e
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/LinkisFlinkManagerJobClient.scala
@@ -0,0 +1,216 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.client
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.entity.vo.YarnAppVo
+import com.webank.wedatasphere.streamis.jobmanager.launcher.enums.JobClientType
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.conf.JobConf
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.constants.JobConstants
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.errorcode.JobLaunchErrorCode
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.exception.JobFetchErrorException
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobStateInfo
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.{FlinkJobKillECErrorException, FlinkJobParamErrorException, FlinkJobStateFetchException, FlinkSavePointException}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.action.{FlinkKillAction, FlinkSaveAction, FlinkStatusAction, ListYarnAppAction}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.client.LinkisFlinkManagerJobClient.linkisFlinkManagerClient
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.jobInfo.EngineConnJobInfo
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.FlinkSavepoint
+import org.apache.commons.lang3.StringUtils
+import org.apache.linkis.common.exception.LinkisRetryException
+import org.apache.linkis.common.utils.{JsonUtils, Logging, RetryHandler}
+import org.apache.linkis.computation.client.once.OnceJob
+import org.apache.linkis.computation.client.once.result.EngineConnOperateResult
+import org.apache.linkis.computation.client.once.simple.SimpleOnceJob
+import org.apache.linkis.governance.common.constant.ec.ECConstants
+import org.apache.linkis.ujes.client.exception.UJESJobException
+
+import java.util
+import scala.collection.JavaConverters.asScalaBufferConverter
+
+class LinkisFlinkManagerJobClient(onceJob: OnceJob, jobInfo: JobInfo, stateManager: JobStateManager) extends EngineConnJobClient(onceJob, jobInfo, stateManager) {
+
+
+ override def init(): Unit = {
+ super.init()
+ logger.info("LinkisFlinkManagerJobClient inited.")
+ }
+
+
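+  /**
+   * A job is "detach" when its client type is DETACH or DETACH_STANDALONE: its
+   * status and lifecycle are then driven through the shared flink manager EC
+   * instead of the job's own EngineConn.
+   */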
+  private def isDetachJob(info: JobInfo): Boolean = {
+    JobClientType.OTHER.toJobClientType(info.getClientType.toLowerCase()) match {
+      case JobClientType.ATTACH =>
+        false
+      case JobClientType.DETACH =>
+        true
+      case JobClientType.DETACH_STANDALONE =>
+        // TODO check
+        true
+      case _ =>
+        throw new FlinkJobParamErrorException(s"Job with manager mode : ${info.getClientType} cannot be submitted.", null)
+ }
+ }
+
+ override def getJobInfo(refresh: Boolean): JobInfo = {
+ onceJob match {
+ case simpleOnceJob: SimpleOnceJob =>
+ if (StringUtils.isNotBlank(jobInfo.getStatus) && JobConf.isCompleted(JobConf.linkisStatusToStreamisStatus(jobInfo.getStatus))) {
+ jobInfo.setStatus(simpleOnceJob.getStatus)
+ logger.info(s"Job : ${simpleOnceJob.getId} is completed, no need to get status from linkis.")
+ } else if (refresh && isDetachJob(jobInfo)) {
+ jobInfo match {
+ case engineConnJobInfo: EngineConnJobInfo =>
+ jobInfo.setStatus(getJobStatusWithRetry(engineConnJobInfo.getApplicationId))
+ case _ =>
+ throw new FlinkJobParamErrorException(s"Invalid jobInfo : ${jobInfo} , cannot get status.", null)
+ }
+ } else {
+ return super.getJobInfo(refresh)
+ }
+ }
+ jobInfo
+ }
+
+ override def stop(snapshot: Boolean): JobStateInfo = {
+ if (isDetachJob(jobInfo)) {
+ jobInfo match {
+ case engineConnJobInfo: EngineConnJobInfo =>
+ val appId = engineConnJobInfo.getApplicationId
+ return stopApp(appId, snapshot)
+ case _ =>
+ throw new FlinkJobParamErrorException(s"Invalid jobInfo : ${jobInfo} , cannot stop.", null)
+ }
+ } else {
+ return super.stop(snapshot)
+ }
+ }
+
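+  /**
+   * Fetch the yarn application status through the manager EC, retrying up to
+   * 3 times on UJES, Linkis-retry and state-fetch exceptions.
+   */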
+ def getJobStatusWithRetry(appId: String): String = {
+ val retryHandler = new RetryHandler {}
+ retryHandler.setRetryNum(3)
+ retryHandler.setRetryMaxPeriod(5000)
+ retryHandler.setRetryPeriod(1000)
+ retryHandler.addRetryException(classOf[UJESJobException])
+ retryHandler.addRetryException(classOf[LinkisRetryException])
+ retryHandler.addRetryException(classOf[FlinkJobStateFetchException])
+ retryHandler.retry(
+ {
+ val statusAction = new FlinkStatusAction(appId, null)
+ val rs = linkisFlinkManagerClient.executeAction(statusAction)
+ rs match {
+ case engineConnOperateResult: EngineConnOperateResult =>
+ if (engineConnOperateResult.getIsError()) {
+ throw new FlinkJobStateFetchException(errorMsg = s"Get status error. Because : ${engineConnOperateResult.getErrorMsg()}", t = null)
+ }
+ val rsMap = engineConnOperateResult.getResult
+ val status = rsMap.getOrDefault(ECConstants.NODE_STATUS_KEY, null)
+ logger.info(s"AppId : ${appId} got status : ${status}")
+ if (null != status) {
+ return status.toString
+ } else {
+ val json = JsonUtils.jackson.writeValueAsString(rsMap)
+ throw new FlinkJobStateFetchException(errorMsg = s"Get invalid status. Result map : ${json}", t = null)
+ }
+ case _ =>
+ val json = JsonUtils.jackson.writeValueAsString(rs)
+ throw new FlinkJobStateFetchException(errorMsg = s"Get invalid result. Response json : ${json}", t = null)
+ }
+ },
+ "Retry-Get-Status")
+ }
+
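+  /**
+   * Stop a detach job by yarn application id: optionally trigger a savepoint
+   * first, then issue a kill through the manager EC.
+   */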
+ def stopApp(appId: String, snapshot: Boolean): JobStateInfo = {
+ val jobStateInfo = new JobStateInfo()
+ if (snapshot) {
+ val savepointURI = this.stateManager.getJobStateDir(classOf[FlinkSavepoint], jobInfo.getName)
+ val flinkSavepoint = doSavePoint(appId, null, savepointURI.toString, JobLauncherConfiguration.FLINK_TRIGGER_SAVEPOINT_MODE.getValue)
+ jobStateInfo.setLocation(flinkSavepoint.getLocation.toString)
+ jobStateInfo.setTimestamp(flinkSavepoint.getTimestamp)
+ }
+ val stopAction = new FlinkKillAction(appId, null)
+ val rs = linkisFlinkManagerClient.executeAction(stopAction)
+ rs match {
+ case engineConnOperateResult: EngineConnOperateResult =>
+ if (engineConnOperateResult.getIsError()) {
+ throw new FlinkJobStateFetchException(errorMsg = s"Get status error. Because : ${engineConnOperateResult.getErrorMsg()}", t = null)
+ }
+ case _ =>
+ val json = JsonUtils.jackson.writeValueAsString(rs)
+ throw new FlinkJobStateFetchException(errorMsg = s"Get invalid result. Response json : ${json}", t = null)
+ }
+ if (StringUtils.isBlank(jobStateInfo.getLocation)) {
+ jobStateInfo.setLocation("No location")
+ }
+ jobStateInfo
+ }
+
+ override def triggerSavepoint(savePointDir: String, mode: String): FlinkSavepoint = {
+ if (isDetachJob(jobInfo)) {
+ var appId: String = null
+      jobInfo match {
+        case engineConnJobInfo: EngineConnJobInfo =>
+          appId = engineConnJobInfo.getApplicationId
+        case _ =>
+          throw new FlinkJobParamErrorException(s"Invalid jobInfo : ${jobInfo} , cannot trigger savepoint.", null)
+      }
+ doSavePoint(appId, null, savePointDir, mode)
+ } else {
+ super.triggerSavepoint(savePointDir, mode)
+ }
+ }
+
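+  /**
+   * Trigger a savepoint through the manager EC and wrap the returned write path
+   * into a FlinkSavepoint.
+   */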
+ def doSavePoint(appId: String, msg: String, savePointDir: String, mode: String): FlinkSavepoint = {
+
+ val savepointAction = new FlinkSaveAction(appId, msg)
+ savepointAction.setSavepointPath(savePointDir)
+ savepointAction.setMode(mode)
+ val rs = linkisFlinkManagerClient.executeAction(savepointAction)
+ rs match {
+ case engineConnOperateResult: EngineConnOperateResult =>
+ if (engineConnOperateResult.getIsError()) {
+ throw new FlinkJobKillECErrorException(s"Do savepoint error. Because : ${engineConnOperateResult.getErrorMsg()}", null)
+ }
+ val writePath = engineConnOperateResult.getAs[String](JobConstants.RESULT_SAVEPOINT_PATH_KEY)
+ if (StringUtils.isBlank(writePath)) {
+ val msg = s"Do savepoint error got null write path. Errormsg : ${engineConnOperateResult.getErrorMsg()} "
+ throw new FlinkSavePointException(errorMsg = msg, t = null)
+ }
+ new FlinkSavepoint(writePath)
+ case _ =>
+ val rsMsg = JsonUtils.jackson.writeValueAsString(rs)
+ val msg = s"Get status error. Result : ${rsMsg}"
+ throw new FlinkSavePointException(errorMsg = msg, t = null)
+ }
+ }
+
+}
+
+object LinkisFlinkManagerJobClient extends Logging {
+
+ private lazy val linkisFlinkManagerClient: LinkisFlinkManagerClient = LinkisFlinkManagerClient.getInstance()
+
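+  /**
+   * List yarn applications for the given job name and user through the manager EC
+   * and map the returned entries into YarnAppVo objects.
+   */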
+ def listYarnApp(jobName: String, user: String, msg: String = "streamis", appTypeList: util.List[String]): util.List[YarnAppVo] = {
+ val resultList = new util.ArrayList[YarnAppVo]()
+ val listAction = new ListYarnAppAction(jobName, user, msg, appTypeList)
+ listAction.setExeuteUser(user)
+ val result = linkisFlinkManagerClient.executeAction(listAction).asInstanceOf[EngineConnOperateResult]
+ if (result.getIsError()) {
+ val msg = s"list failed for jobName : ${jobName} and user : ${user}, because : ${result.getErrorMsg()}"
+ logger.error(msg)
+ throw new JobFetchErrorException(JobLaunchErrorCode.JOB_LIST_YARN_APP_ERROR, msg)
+ } else {
+ val rsMap = result.getResult
+ if (rsMap.containsKey(ECConstants.YARN_APP_RESULT_LIST_KEY)) {
+ val rsListStr = rsMap.get(ECConstants.YARN_APP_RESULT_LIST_KEY).asInstanceOf[String]
+ val rsList = JsonUtils.jackson.readValue(rsListStr, classOf[util.List[util.Map[String, String]]])
+ rsList.asScala.foreach(map => {
+ val tmpVo = new YarnAppVo()
+ tmpVo.setApplicationName(map.getOrDefault(ECConstants.YARN_APP_NAME_KEY, null))
+ tmpVo.setApplicationState(map.getOrDefault(ECConstants.NODE_STATUS_KEY, null))
+ tmpVo.setYarnAppType(map.getOrDefault(ECConstants.YARN_APP_TYPE_KEY, null))
+ tmpVo.setApplicationId(map.getOrDefault(ECConstants.YARN_APPID_NAME_KEY, null))
+ tmpVo.setApplicationUrl(map.getOrDefault(ECConstants.YARN_APP_URL_KEY, null))
+ resultList.add(tmpVo)
+ })
+ }
+ resultList
+ }
+ }
+}
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/SparkRestJobClient.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/SparkRestJobClient.scala
new file mode 100644
index 000000000..06d449cb5
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/SparkRestJobClient.scala
@@ -0,0 +1,18 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.client
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager
+import org.apache.linkis.computation.client.once.OnceJob
+
+/**
+ * @author jefftlin
+ */
+class SparkRestJobClient(onceJob: OnceJob, jobInfo: JobInfo, stateManager: JobStateManager) extends YarnRestJobClient(onceJob, jobInfo, stateManager) {
+
+ /**
+ * Stop
+ */
+ override def stop(): Unit = {
+ //nothing
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/YarnRestJobClient.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/YarnRestJobClient.scala
new file mode 100644
index 000000000..64be1ad25
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/YarnRestJobClient.scala
@@ -0,0 +1,14 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.client
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager
+import org.apache.linkis.computation.client.once.OnceJob
+
+/**
+ * @author jefftlin
+ */
+abstract class YarnRestJobClient(onceJob: OnceJob, jobInfo: JobInfo, stateManager: JobStateManager) extends AbstractRestJobClient(onceJob, jobInfo, stateManager) {
+
+
+
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/factory/AbstractJobClientFactory.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/factory/AbstractJobClientFactory.scala
new file mode 100644
index 000000000..f640c0955
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/factory/AbstractJobClientFactory.scala
@@ -0,0 +1,99 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.client.factory
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.enums.JobClientType
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{JobClient, JobInfo}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.FlinkJobLaunchErrorException
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.client.LinkisFlinkManagerClient
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.jobInfo.{EngineConnJobInfo, LinkisJobInfo}
+import org.apache.commons.lang3.StringUtils
+import org.apache.linkis.common.utils.{Logging, Utils}
+import org.apache.linkis.computation.client.once.OnceJob
+
+class AbstractJobClientFactory extends Logging {
+
+  @volatile var engineConnJobClientFactory: EngineConnJobClientFactory = _
+
+  @volatile var linkisFlinkManagerClientFactory: LinkisFlinkManagerClientFactory = _
+
+ def validateClientInfo(jobInfo: JobInfo): Boolean = {
+ StringUtils.isNotBlank(jobInfo.getEngineType) && StringUtils.isNotBlank(jobInfo.getEngineVersion)
+ }
+
+ /**
+ * Create job client
+ *
+ * @param onceJob once job
+ * @param jobInfo job info
+ * @return
+ */
+ def createJobClient(onceJob: OnceJob, jobInfo: JobInfo, jobStateManager: JobStateManager): JobClient[LinkisJobInfo] = {
+ if (!validateClientInfo(jobInfo)) {
+ throw new FlinkJobLaunchErrorException(-1, "Param: [engineType, engineVersion] is necessary in job information", null)
+ }
+ val clientType = Option(jobInfo.getClientType).getOrElse(JobClientType.ATTACH.getName).toLowerCase()
+ val client = getJobClientFactory(clientType)
+ .createJobClient(onceJob, jobInfo, jobStateManager)
+ .asInstanceOf[JobClient[LinkisJobInfo]]
+ client
+ }
+
+ /**
+ * Get jobClientFactory by different connectType
+ *
+ * @param connectType
+ * @return
+ */
+ def getJobClientFactory(connectType: String): JobClientFactory = {
+ connectType match {
+ case "attach" =>
+ getAttachClientFactory()
+ case "detach" =>
+ getDetachClientFactory()
+ case _ =>
+ logger.warn("Manage type is null, will use 'attach' as default.")
+ getAttachClientFactory()
+ }
+ }
+
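+  /** Lazily create the attach-mode (EngineConn) factory with double-checked locking. */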
+ def getAttachClientFactory(): JobClientFactory = {
+ if (null == this.engineConnJobClientFactory) {
+ this.synchronized {
+ if (null == this.engineConnJobClientFactory) {
+ this.engineConnJobClientFactory = new EngineConnJobClientFactory
+ this.engineConnJobClientFactory.init()
+ }
+ }
+ }
+ this.engineConnJobClientFactory
+ }
+
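+  /** Lazily create the detach-mode (flink manager) factory with double-checked locking. */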
+ def getDetachClientFactory(): JobClientFactory = {
+ if (null == this.linkisFlinkManagerClientFactory) {
+ this.synchronized {
+ if (null == this.linkisFlinkManagerClientFactory) {
+ this.linkisFlinkManagerClientFactory = new LinkisFlinkManagerClientFactory
+ this.linkisFlinkManagerClientFactory.init()
+ }
+ }
+ }
+ this.linkisFlinkManagerClientFactory
+ }
+}
+
+object AbstractJobClientFactory {
+
+ /**
+ * Store the job launch managers
+ */
+ private val flinkJobClientFactory = new AbstractJobClientFactory
+
+ def getJobManager(): AbstractJobClientFactory = {
+ flinkJobClientFactory
+ }
+
+}
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/factory/EngineConnJobClientFactory.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/factory/EngineConnJobClientFactory.scala
new file mode 100644
index 000000000..ceb5416d6
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/factory/EngineConnJobClientFactory.scala
@@ -0,0 +1,35 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.client.factory
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{JobClient, JobInfo}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.client.EngineConnJobClient
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.jobInfo.LinkisJobInfo
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.utils.LinkisClientUtils
+import org.apache.linkis.computation.client.once.OnceJob
+
+class EngineConnJobClientFactory extends JobClientFactory {
+
+
+ /**
+ * Create job client
+ *
+ * @param onceJob once job
+ * @param jobInfo job info
+ * @param jobStateManager
+ * @return
+ */
+ override def createJobClient(onceJob: OnceJob, jobInfo: JobInfo, jobStateManager: JobStateManager): JobClient[LinkisJobInfo] = {
+ val flinkEngineConnJobClient = new EngineConnJobClient(onceJob, jobInfo, jobStateManager)
+ flinkEngineConnJobClient.setLinkisClient(LinkisClientUtils.getLinkisDwsClient)
+ flinkEngineConnJobClient.asInstanceOf[JobClient[LinkisJobInfo]]
+ }
+
+ /**
+ * Init the factory
+ */
+ override def init(): Unit = {
+ //init
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/factory/JobClientFactory.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/factory/JobClientFactory.scala
new file mode 100644
index 000000000..bb8a0822e
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/factory/JobClientFactory.scala
@@ -0,0 +1,16 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.client.factory
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{JobClient, JobInfo}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.jobInfo.LinkisJobInfo
+import org.apache.linkis.computation.client.once.OnceJob
+
+trait JobClientFactory {
+
+ def createJobClient(onceJob: OnceJob, jobInfo: JobInfo, jobStateManager: JobStateManager): JobClient[LinkisJobInfo]
+
+ /**
+ * Init the factory
+ */
+ def init(): Unit
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/factory/LinkisFlinkManagerClientFactory.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/factory/LinkisFlinkManagerClientFactory.scala
new file mode 100644
index 000000000..5aecf3396
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/factory/LinkisFlinkManagerClientFactory.scala
@@ -0,0 +1,23 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.client.factory
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{JobClient, JobInfo}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.client.{LinkisFlinkManagerClient, LinkisFlinkManagerJobClient}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.jobInfo.LinkisJobInfo
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.utils.LinkisClientUtils
+import org.apache.linkis.computation.client.once.OnceJob
+
+class LinkisFlinkManagerClientFactory extends JobClientFactory {
+
+ override def createJobClient(onceJob: OnceJob, jobInfo: JobInfo, jobStateManager: JobStateManager): JobClient[LinkisJobInfo] = {
+ val client = new LinkisFlinkManagerJobClient(onceJob, jobInfo, jobStateManager)
+ client.setLinkisClient(LinkisClientUtils.getLinkisDwsClient)
+ client.asInstanceOf[JobClient[LinkisJobInfo]]
+ }
+
+ /**
+ * Init the factory
+ */
+ override def init(): Unit = {
+ //init
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/factory/RestJobClientFactory.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/factory/RestJobClientFactory.scala
new file mode 100644
index 000000000..90d817ddb
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/client/factory/RestJobClientFactory.scala
@@ -0,0 +1,43 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.client.factory
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{JobClient, JobInfo, LaunchJob}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.jobInfo.LinkisJobInfo
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.utils.HttpClientUtil
+import org.apache.http.impl.client.CloseableHttpClient
+import org.apache.linkis.common.conf.CommonVars
+import org.apache.linkis.computation.client.once.OnceJob
+
+import java.util.concurrent.ConcurrentHashMap
+
+class RestJobClientFactory extends JobClientFactory {
+
+ val jobClientTypeList: CommonVars[String] = CommonVars.apply("wds.streamis.job.client.type", "flink,spark")
+
+ val jobClientMap = new ConcurrentHashMap[String, CloseableHttpClient]
+
+ /**
+ * Create job client
+ *
+ * @param onceJob
+   * @param jobInfo
+ * @param jobStateManager
+ * @return
+ */
+  override def createJobClient(onceJob: OnceJob, jobInfo: JobInfo, jobStateManager: JobStateManager): JobClient[LinkisJobInfo] = {
+ //create flink spark client
+ null
+ }
+
+ /**
+ * Init the factory, create all kinds of client
+ */
+ override def init(): Unit = {
+ // Create all kinds of client
+ jobClientTypeList.getValue.split(",").toList.foreach((engineType: String) => {
+ val httpClient = HttpClientUtil.createHttpClientUtil(null)
+ jobClientMap.put(engineType, httpClient)
+ })
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobInfo.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/jobInfo/EngineConnJobInfo.scala
similarity index 57%
rename from streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobInfo.scala
rename to streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/jobInfo/EngineConnJobInfo.scala
index 5c2986609..461dc1dee 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/FlinkJobInfo.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/jobInfo/EngineConnJobInfo.scala
@@ -1,77 +1,81 @@
-/*
- * Copyright 2021 WeBank
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job
-
-import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.{JobState, JobStateInfo}
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.jobInfo
+
+import com.fasterxml.jackson.annotation.JsonIgnore
+import com.webank.wedatasphere.streamis.jobmanager.launcher.enums.JobClientType
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobStateInfo
import org.apache.linkis.common.ServiceInstance
-import org.apache.linkis.httpclient.dws.DWSHttpClient
import java.util
-
-class FlinkJobInfo extends YarnJobInfo {
-
+class EngineConnJobInfo extends YarnJobInfo with LinkisJobInfo {
private var id: String = _
private var name: String = _
- private var ecmInstance: ServiceInstance = _
private var user: String = _
- private var savepoint: String = _
- private var checkpoint: String = _
- private var applicationId: String = _
- private var applicationUrl: String = _
private var status: String = _
private var logPath: String = _
private var resources: java.util.Map[String, Object] = _
private var completedMsg: String = _
private var jobStates: Array[JobStateInfo] = _
+ private var engineType: String = "flink"
+ private var engineVersion: String = "1.12.2"
+ private var clientType: String = JobClientType.ATTACH.toString
+
+ private var savepoint: String = _
+ private var checkpoint: String = _
+ private var applicationId: String = _
+ private var applicationUrl: String = _
+
+ private var ecmInstance: ServiceInstance = _
+ private var logDirSuffix: String = _
+ @JsonIgnore
+ private var jobParams: java.util.Map[String, Object] = _
+ private var ecInstance: ServiceInstance = _
+
override def getApplicationId: String = applicationId
+
def setApplicationId(applicationId: String): Unit = this.applicationId = applicationId
override def getApplicationUrl: String = applicationUrl
+
def setApplicationUrl(applicationUrl: String): Unit = this.applicationUrl = applicationUrl
override def getId: String = id
- def setId(id: String): Unit = this.id = id
+ def setId(id: String): Unit = this.id = id
override def getECMInstance: ServiceInstance = ecmInstance
+
def setECMInstance(ecmInstance: ServiceInstance): Unit = this.ecmInstance = ecmInstance
override def getUser: String = user
+
def setUser(user: String): Unit = this.user = user
override def getStatus: String = status
+
override def setStatus(status: String): Unit = this.status = status
override def getLogPath: String = logPath
+
def setLogPath(logPath: String): Unit = this.logPath = logPath
override def getResources: util.Map[String, Object] = resources
+
def setResources(resources: java.util.Map[String, Object]): Unit = this.resources = resources
def getSavepoint: String = savepoint
+
def setSavepoint(savepoint: String): Unit = this.savepoint = savepoint
def getCheckpoint: String = checkpoint
+
def setCheckpoint(checkpoint: String): Unit = this.checkpoint = checkpoint
override def getCompletedMsg: String = completedMsg
+
def setCompletedMsg(completedMsg: String): Unit = this.completedMsg = completedMsg
- override def toString: String = s"FlinkJobInfo(id: $id, status: $status, applicationId: $applicationId, applicationUrl: $applicationUrl, logPath: $logPath)"
+  override def toString: String = s"EngineConnJobInfo(id: $id, name: $name, status: $status, applicationId: $applicationId, applicationUrl: $applicationUrl, logPath: $logPath)"
/**
* Contains the check point and save points
@@ -85,6 +89,7 @@ class FlinkJobInfo extends YarnJobInfo {
def setJobStates(jobStates: Array[JobStateInfo]): Unit = {
this.jobStates = jobStates
}
+
/**
* Job name
*
@@ -95,11 +100,60 @@ class FlinkJobInfo extends YarnJobInfo {
def setName(name: String): Unit = {
this.name = name
}
-}
-object FlinkJobInfo{
- def main(args: Array[String]): Unit = {
- val jobInfo = "{\"jobStates:\":{\"location\":\"xx\"}"
- DWSHttpClient.jacksonJson.readValue(jobInfo, classOf[FlinkJobInfo])
+ /**
+ * Job log directory suffix
+ *
+ * @return
+ */
+ override def getLogDirSuffix: String = this.logDirSuffix
+
+ override def setLogDirSuffix(logDirSuffix: String): Unit = {
+ this.logDirSuffix = logDirSuffix
+ }
+
+ /**
+ * Engine type
+ *
+ * @return
+ */
+ override def getEngineType: String = engineType
+
+ def setEngineType(engineType: String): Unit = {
+ this.engineType = engineType
+ }
+ /**
+ * Client type
+ *
+ * @return
+ */
+ override def getClientType: String = clientType
+
+ def setClientType(clientType: String): Unit = {
+ this.clientType = clientType
+ }
+ /**
+ * Engine version
+ *
+ * @return
+ */
+ override def getEngineVersion: String = engineVersion
+
+ def setEngineVersion(version: String): Unit = {
+ this.engineVersion = version
+ }
+
+ def getJobParams(): util.Map[String, Object] = jobParams
+
+ def setJobParams(params: util.Map[String, Object]): EngineConnJobInfo = {
+ this.jobParams = params
+ this
+ }
+
+ def getEcInstance(): ServiceInstance = ecInstance
+
+ def setEcInstance(instance: ServiceInstance): EngineConnJobInfo = {
+ this.ecInstance = instance
+ this
}
}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/jobInfo/FlinkRestJobInfo.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/jobInfo/FlinkRestJobInfo.scala
new file mode 100644
index 000000000..62a3f9607
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/jobInfo/FlinkRestJobInfo.scala
@@ -0,0 +1,153 @@
+///*
+// * Copyright 2021 WeBank
+// * Licensed under the Apache License, Version 2.0 (the "License");
+// * you may not use this file except in compliance with the License.
+// * You may obtain a copy of the License at
+// *
+// * http://www.apache.org/licenses/LICENSE-2.0
+// *
+// * Unless required by applicable law or agreed to in writing, software
+// * distributed under the License is distributed on an "AS IS" BASIS,
+// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// * See the License for the specific language governing permissions and
+// * limitations under the License.
+// */
+//
+//package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.jobInfo
+//
+//import com.webank.wedatasphere.streamis.jobmanager.entrypoint.message.JobHeartbeatMessage
+//import com.webank.wedatasphere.streamis.jobmanager.launcher.job.`type`.JobClientType
+//import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobStateInfo
+//
+//import java.util
+//
+//
+//class FlinkRestJobInfo extends YarnJobInfo with RestJobInfo {
+//
+// private var id: String = _
+// private var name: String = _
+// private var user: String = _
+// private var status: String = _
+// private var logPath: String = _
+// private var resources: java.util.Map[String, Object] = _
+// private var completedMsg: String = _
+// private var jobStates: Array[JobStateInfo] = _
+// private var engineType: String = "flink"
+// private var clientType: JobClientType.Value = JobClientType.DETACH
+//
+// private var savepoint: String = _
+// private var checkpoint: String = _
+// private var applicationId: String = _
+// private var applicationUrl: String = _
+//
+// private var jobHeartbeatMessage: JobHeartbeatMessage = _
+//
+// override def getApplicationId: String = applicationId
+//
+// def setApplicationId(applicationId: String): Unit = this.applicationId = applicationId
+//
+// override def getApplicationUrl: String = applicationUrl
+//
+// def setApplicationUrl(applicationUrl: String): Unit = this.applicationUrl = applicationUrl
+//
+// override def getId: String = id
+//
+// def setId(id: String): Unit = this.id = id
+//
+// override def getUser: String = user
+//
+// def setUser(user: String): Unit = this.user = user
+//
+// override def getStatus: String = status
+//
+// override def setStatus(status: String): Unit = this.status = status
+//
+// override def getLogPath: String = logPath
+//
+// def setLogPath(logPath: String): Unit = this.logPath = logPath
+//
+// override def getResources: util.Map[String, Object] = resources
+//
+// def setResources(resources: java.util.Map[String, Object]): Unit = this.resources = resources
+//
+// def getSavepoint: String = savepoint
+//
+// def setSavepoint(savepoint: String): Unit = this.savepoint = savepoint
+//
+// def getCheckpoint: String = checkpoint
+//
+// def setCheckpoint(checkpoint: String): Unit = this.checkpoint = checkpoint
+//
+// override def getCompletedMsg: String = completedMsg
+//
+// def setCompletedMsg(completedMsg: String): Unit = this.completedMsg = completedMsg
+//
+// override def toString: String = s"FlinkJobInfo(id: $id, status: $status, applicationId: $applicationId, applicationUrl: $applicationUrl, logPath: $logPath)"
+//
+// /**
+// * Contains the check point and save points
+// *
+// * @return
+// */
+// override def getJobStates: Array[JobStateInfo] = {
+// jobStates
+// }
+//
+// def setJobStates(jobStates: Array[JobStateInfo]): Unit = {
+// this.jobStates = jobStates
+// }
+//
+// /**
+// * Job name
+// *
+// * @return name
+// */
+// override def getName: String = name
+//
+// def setName(name: String): Unit = {
+// this.name = name
+// }
+//
+// /**
+// * Engine type
+// *
+// * @return
+// */
+// override def getEngineType: String = engineType
+//
+// def setEngineType(engineType: String): Unit = {
+// this.engineType = engineType
+// }
+// /**
+// * Client type
+// *
+// * @return
+// */
+// override def getClientType: JobClientType.Value = clientType
+//
+// def setClientType(clientType: JobClientType.Value): Unit = {
+// this.clientType = clientType
+// }
+// /**
+// * Engine version
+// *
+// * @return
+// */
+// override def getEngineVersion: String = "1.12.2"
+//
+// def setEngineVersion(version: String): Unit = {
+//
+// }
+//
+// /**
+// * Rest job heartbeat message
+// *
+// * @return
+// */
+// override def getMessage: JobHeartbeatMessage = this.jobHeartbeatMessage
+//
+// override def setMessage(message: JobHeartbeatMessage): Unit = {
+// this.jobHeartbeatMessage = message
+// }
+//}
+//
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/LinkisJobInfo.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/jobInfo/LinkisJobInfo.scala
similarity index 67%
rename from streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/LinkisJobInfo.scala
rename to streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/jobInfo/LinkisJobInfo.scala
index da02fda50..666b27153 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/LinkisJobInfo.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/jobInfo/LinkisJobInfo.scala
@@ -1,4 +1,4 @@
-package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.jobInfo
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo
import org.apache.linkis.common.ServiceInstance
@@ -7,9 +7,17 @@ trait LinkisJobInfo extends JobInfo {
/**
* Fetch engine conn manager instance info
+ *
* @return
*/
def getECMInstance: ServiceInstance
+ /**
+ * Job log directory suffix
+ *
+ * @return
+ */
+ def getLogDirSuffix: String
+ def setLogDirSuffix(logDirSuffix: String): Unit
}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/jobInfo/RestJobInfo.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/jobInfo/RestJobInfo.scala
new file mode 100644
index 000000000..07e850678
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/jobInfo/RestJobInfo.scala
@@ -0,0 +1,17 @@
+//package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.jobInfo
+//
+//import com.webank.wedatasphere.streamis.jobmanager.entrypoint.message.JobHeartbeatMessage
+//import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo
+//
+//trait RestJobInfo extends JobInfo {
+//
+// /**
+// * Rest job heartbeat message
+// *
+// * @return
+// */
+// def getMessage: JobHeartbeatMessage
+//
+// def setMessage(message: JobHeartbeatMessage): Unit
+//
+//}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/jobInfo/SparkJobInfo.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/jobInfo/SparkJobInfo.scala
new file mode 100644
index 000000000..f73dfb489
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/jobInfo/SparkJobInfo.scala
@@ -0,0 +1,114 @@
+//package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.jobInfo
+//
+//import com.webank.wedatasphere.streamis.jobmanager.entrypoint.message.JobHeartbeatMessage
+//import com.webank.wedatasphere.streamis.jobmanager.launcher.job.`type`.JobClientType
+//import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobStateInfo
+//
+//import java.util
+//
+//class SparkJobInfo extends RestJobInfo {
+// private var id: String = _
+// private var name: String = _
+// private var user: String = _
+// private var status: String = _
+// private var logPath: String = _
+// private var resources: java.util.Map[String, Object] = _
+// private var completedMsg: String = _
+// private var jobStates: Array[JobStateInfo] = _
+// private var engineType: String = "spark"
+// private var clientType: JobClientType.Value = JobClientType.DETACH
+//
+// private var jobHeartbeatMessage: JobHeartbeatMessage = _
+//
+// override def getId: String = id
+//
+// def setId(id: String): Unit = this.id = id
+//
+// override def getUser: String = user
+//
+// def setUser(user: String): Unit = this.user = user
+//
+// override def getStatus: String = status
+//
+// override def setStatus(status: String): Unit = this.status = status
+//
+// override def getLogPath: String = logPath
+//
+// def setLogPath(logPath: String): Unit = this.logPath = logPath
+//
+// override def getResources: util.Map[String, Object] = resources
+//
+// def setResources(resources: java.util.Map[String, Object]): Unit = this.resources = resources
+//
+// override def getCompletedMsg: String = completedMsg
+//
+// def setCompletedMsg(completedMsg: String): Unit = this.completedMsg = completedMsg
+//
+// override def toString: String = s"FlinkJobInfo(id: $id, status: $status, logPath: $logPath)"
+//
+// /**
+// * Contains the check point and save points
+// *
+// * @return
+// */
+// override def getJobStates: Array[JobStateInfo] = {
+// jobStates
+// }
+//
+// def setJobStates(jobStates: Array[JobStateInfo]): Unit = {
+// this.jobStates = jobStates
+// }
+//
+// /**
+// * Job name
+// *
+// * @return name
+// */
+// override def getName: String = name
+//
+// def setName(name: String): Unit = {
+// this.name = name
+// }
+//
+// /**
+// * Engine type
+// *
+// * @return
+// */
+// override def getEngineType: String = engineType
+//
+// def setEngineType(engineType: String): Unit = {
+// this.engineType = engineType
+// }
+// /**
+// * Client type
+// *
+// * @return
+// */
+// override def getClientType: JobClientType.Value = clientType
+//
+// def setClientType(clientType: JobClientType.Value): Unit = {
+// this.clientType = clientType
+// }
+// /**
+// * Engine version
+// *
+// * @return
+// */
+// override def getEngineVersion: String = "1.12.2"
+//
+// def setEngineVersion(version: String): Unit = {
+//
+// }
+//
+// /**
+// * Rest job heartbeat message
+// *
+// * @return
+// */
+// override def getMessage: JobHeartbeatMessage = this.jobHeartbeatMessage
+//
+// override def setMessage(message: JobHeartbeatMessage): Unit = {
+// this.jobHeartbeatMessage = message
+// }
+//}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/YarnJobInfo.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/jobInfo/YarnJobInfo.scala
similarity index 51%
rename from streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/YarnJobInfo.scala
rename to streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/jobInfo/YarnJobInfo.scala
index d2907f901..4046daca7 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/YarnJobInfo.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/jobInfo/YarnJobInfo.scala
@@ -1,6 +1,8 @@
-package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.jobInfo
-trait YarnJobInfo extends LinkisJobInfo {
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo
+
+trait YarnJobInfo extends JobInfo {
def getApplicationId: String
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/AbstractJobStateManager.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/AbstractJobStateManager.scala
index 767b9e58e..267d3bf73 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/AbstractJobStateManager.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/AbstractJobStateManager.scala
@@ -107,7 +107,7 @@ abstract class AbstractJobStateManager extends JobStateManager {
path = path.replaceAll("/+", "/")
// Replace "." to "/"
path = path.replaceAll("\\.", "/")
- if (path.endsWith("/") && !(path == "/") && !WINDOWS_ROOT_DIR_REGEX.pattern.matcher(path).matches()) path = path.substring(0, path.length - "/".length)
+ if (path.endsWith("/") && (path != "/") && !WINDOWS_ROOT_DIR_REGEX.pattern.matcher(path).matches()) path = path.substring(0, path.length - "/".length)
path
}
def getJobStateRootPath[T <: JobState](clazz: Class[_], schema: String): String
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/FlinkJobLaunchManager.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/FlinkJobLaunchManager.scala
index a00edd8e8..d7aceedb1 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/FlinkJobLaunchManager.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/FlinkJobLaunchManager.scala
@@ -15,32 +15,38 @@
package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager
-import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{JobClient, LaunchJob}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.enums.JobClientType
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobStateManager
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{JobClient, LaunchJob}
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration.{VAR_FLINK_APP_NAME, VAR_FLINK_SAVEPOINT_PATH}
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.exception.FlinkJobLaunchErrorException
-import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.LinkisJobInfo
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.jobInfo.{EngineConnJobInfo, LinkisJobInfo}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.client.factory.AbstractJobClientFactory
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager.FlinkJobLaunchManager.EXCEPTION_PATTERN
+import org.apache.commons.lang3.StringEscapeUtils
import org.apache.linkis.common.utils.{Logging, Utils}
import org.apache.linkis.computation.client.once.{OnceJob, SubmittableOnceJob}
import org.apache.linkis.computation.client.utils.LabelKeyUtils
import org.apache.linkis.protocol.utils.TaskUtils
+import scala.util.matching.Regex
trait FlinkJobLaunchManager extends LinkisJobLaunchManager with Logging {
- protected var jobStateManager: JobStateManager = _
-
protected def buildOnceJob(job: LaunchJob): SubmittableOnceJob
protected def createSubmittedOnceJob(id: String, jobInfo: LinkisJobInfo): OnceJob
-
protected def createJobInfo(onceJob: SubmittableOnceJob, job: LaunchJob, jobState: JobState): LinkisJobInfo
protected def createJobInfo(jobInfo: String): LinkisJobInfo
+ protected var jobStateManager: JobStateManager = _
+
+
/**
* This method is used to launch a new job.
*
@@ -49,8 +55,8 @@ trait FlinkJobLaunchManager extends LinkisJobLaunchManager with Logging {
* @return the job id.
*/
override def innerLaunch(job: LaunchJob, jobState: JobState): JobClient[LinkisJobInfo] = {
- // Transform the JobState into the params in LaunchJob
- Option(jobState).foreach(state => {
+    // Transform the JobState (isRestore = true) into the params of LaunchJob
+    Option(jobState).filter(state => state.isRestore).foreach(state => {
val startUpParams = TaskUtils.getStartupMap(job.getParams)
startUpParams.putIfAbsent(VAR_FLINK_SAVEPOINT_PATH.getValue,
state.getLocation.toString)
@@ -65,26 +71,44 @@ trait FlinkJobLaunchManager extends LinkisJobLaunchManager with Logging {
job.getLabels.get(LabelKeyUtils.ENGINE_TYPE_LABEL_KEY) match {
case engineConnType: String =>
if(!engineConnType.toLowerCase.startsWith(FlinkJobLaunchManager.FLINK_ENGINE_CONN_TYPE))
- throw new FlinkJobLaunchErrorException(30401, s"Only ${FlinkJobLaunchManager.FLINK_ENGINE_CONN_TYPE} job is supported to be launched to Linkis, but $engineConnType is found.", null)
- case _ => throw new FlinkJobLaunchErrorException(30401, s"Not exists ${LabelKeyUtils.ENGINE_TYPE_LABEL_KEY}, StreamisJob cannot be submitted to Linkis successfully.", null)
+ throw new FlinkJobLaunchErrorException(30401, s"Only ${FlinkJobLaunchManager.FLINK_ENGINE_CONN_TYPE} job is supported to be launched to Linkis, but $engineConnType is found.(不识别的引擎类型)", null)
+
+      case _ => throw new FlinkJobLaunchErrorException(30401, s"Label ${LabelKeyUtils.ENGINE_TYPE_LABEL_KEY} does not exist(缺少引擎标签), StreamisJob cannot be submitted to Linkis successfully.", null)
}
Utils.tryCatch {
val onceJob = buildOnceJob(job)
onceJob.submit()
- val jobInfo = Utils.tryCatch(createJobInfo(onceJob, job, jobState)) {
+ val jobInfo: LinkisJobInfo = Utils.tryCatch(createJobInfo(onceJob, job, jobState)) {
case e: FlinkJobLaunchErrorException =>
throw e
case t: Throwable =>
error(s"${job.getSubmitUser} create jobInfo failed, now stop this EngineConn ${onceJob.getId}.")
- Utils.tryAndWarn(onceJob.kill())
- throw new FlinkJobLaunchErrorException(-1, "Fail to obtain launched job info", t)
+ Utils.tryQuietly(onceJob.kill())
+ var stopMsg = ""
+ Utils.tryCatch {
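+          // Build a minimal job info so a job client can be created to stop the half-launched EngineConn / yarn app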
+ val tmpJobInfo = new EngineConnJobInfo
+ tmpJobInfo.setName(StringEscapeUtils.escapeJava(job.getJobName))
+ tmpJobInfo.setId(onceJob.getId)
+ tmpJobInfo.setUser(job.getSubmitUser)
+ val startupMap = TaskUtils.getStartupMap(job.getParams)
+ val managerMode = startupMap.getOrDefault(JobLauncherConfiguration.MANAGER_MODE_KEY.getValue, JobClientType.ATTACH.getName).toString.toLowerCase()
+ tmpJobInfo.setClientType(managerMode)
+ AbstractJobClientFactory.getJobManager().createJobClient(onceJob, tmpJobInfo, getJobStateManager).stop()
+ } {
+ case e: Exception =>
+ val msg = s"Failed to kill job with id : ${onceJob.getId}, because : ${e.getMessage}, please go to check the app in yarn(停止APP失败,请上yarn查看)"
+ logger.error(msg)
+ stopMsg = msg
+ }
+ throw new FlinkJobLaunchErrorException(-1, exceptionAnalyze(s"Fail to obtain launched job info(获取任务信息失败,引擎服务可能启动失败). ${stopMsg}", t), t)
}
- createJobClient(onceJob, jobInfo)
+ val client = AbstractJobClientFactory.getJobManager().createJobClient(onceJob, jobInfo, getJobStateManager)
+ client
}{
case e: FlinkJobLaunchErrorException => throw e
case t: Throwable =>
error(s"Server Exception in submitting Flink job [${job.getJobName}] to Linkis remote server", t)
- throw new FlinkJobLaunchErrorException(-1, s"Exception in submitting Flink job to Linkis remote server (提交至Linkis服务失败,请检查服务及网络)", t)
+ throw new FlinkJobLaunchErrorException(-1, exceptionAnalyze(s"Exception in submitting Flink job to Linkis remote server (提交至Linkis服务失败,请检查服务及网络)", t), t)
}
}
@@ -97,8 +121,9 @@ trait FlinkJobLaunchManager extends LinkisJobLaunchManager with Logging {
connect(id, createJobInfo(jobInfo))
}
+
override def connect(id: String, jobInfo: LinkisJobInfo): JobClient[LinkisJobInfo] = {
- createJobClient(createSubmittedOnceJob(id, jobInfo), jobInfo)
+ AbstractJobClientFactory.getJobManager().createJobClient(createSubmittedOnceJob(id, jobInfo), jobInfo, getJobStateManager)
}
@@ -120,13 +145,23 @@ trait FlinkJobLaunchManager extends LinkisJobLaunchManager with Logging {
}
/**
- * Create job client
- * @param onceJob once job
- * @param jobInfo job info
+ * Exception analyzer
+ * @param errorMsg error message
+ * @param t throwable
* @return
*/
- protected def createJobClient(onceJob: OnceJob, jobInfo: LinkisJobInfo): JobClient[LinkisJobInfo]
+ def exceptionAnalyze(errorMsg: String, t: Throwable): String = {
+ EXCEPTION_PATTERN.findFirstMatchIn(t.getMessage) match {
+ case Some(m) =>
+ errorMsg + s", 原因分析[${m.group(1)}]"
+ case _ => errorMsg
+ }
+ }
}
+
object FlinkJobLaunchManager {
val FLINK_ENGINE_CONN_TYPE = "flink"
+
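+  // Assumed shape of Linkis error strings: "...,desc:<message>,(ip|port|serviceKind):...";
+  // the group captures the desc segment so exceptionAnalyze can surface it as the failure cause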
+ val EXCEPTION_PATTERN: Regex = "[\\s\\S]+,desc:([\\s\\S]+?),(ip|port|serviceKind)[\\s\\S]+$".r
+
}
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/FlinkJobStateManager.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/FlinkJobStateManager.scala
index 75a4af2b1..6eb03d463 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/FlinkJobStateManager.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/FlinkJobStateManager.scala
@@ -1,7 +1,7 @@
package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration
-import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.{Checkpoint, CheckpointJobStateFetcher, Savepoint, SavepointJobStateFetcher}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.{FlinkCheckpoint, FlinkCheckpointJobStateFetcher, FlinkSavepoint, FlinkSavepointJobStateFetcher}
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.url.LinkisURLStreamHandlerFactory
import org.apache.linkis.common.utils.{Logging, Utils}
@@ -17,8 +17,8 @@ class FlinkJobStateManager extends AbstractJobStateManager with Logging{
* State type => root path
*/
val stateRootPath: Map[String, String] = Map(
- classOf[Savepoint].getCanonicalName -> JobLauncherConfiguration.FLINK_SAVEPOINT_PATH.getValue,
- classOf[Checkpoint].getCanonicalName -> JobLauncherConfiguration.FLINK_CHECKPOINT_PATH.getValue
+ classOf[FlinkSavepoint].getCanonicalName -> JobLauncherConfiguration.FLINK_SAVEPOINT_PATH.getValue,
+ classOf[FlinkCheckpoint].getCanonicalName -> JobLauncherConfiguration.FLINK_CHECKPOINT_PATH.getValue
)
override def getJobStateRootPath[T <: JobState](clazz: Class[_], schema: String): String = {
@@ -30,9 +30,9 @@ class FlinkJobStateManager extends AbstractJobStateManager with Logging{
*/
override def init(): Unit = {
info("Register the loader for JobState fetcher")
- // TODO register the fetcher
- registerJobStateFetcher(classOf[Checkpoint], () => new CheckpointJobStateFetcher(classOf[Checkpoint], this))
- registerJobStateFetcher(classOf[Savepoint], () => new SavepointJobStateFetcher(classOf[Savepoint], this))
+
+ registerJobStateFetcher(classOf[FlinkCheckpoint], () => new FlinkCheckpointJobStateFetcher(classOf[FlinkCheckpoint], this))
+ registerJobStateFetcher(classOf[FlinkSavepoint], () => new FlinkSavepointJobStateFetcher(classOf[FlinkSavepoint], this))
}
/**
@@ -51,6 +51,6 @@ object FlinkJobStateManager{
URL.setURLStreamHandlerFactory(new LinkisURLStreamHandlerFactory(JobLauncherConfiguration.FLINK_STATE_SUPPORT_SCHEMES.getValue.split(","): _*))
def main(args: Array[String]): Unit = {
-
+    // nothing to do
}
}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/LinkisJobLaunchManager.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/LinkisJobLaunchManager.scala
index 26d76cfdf..b7081b860 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/LinkisJobLaunchManager.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/LinkisJobLaunchManager.scala
@@ -4,7 +4,7 @@ import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobLaunc
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{JobClient, LaunchJob}
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration
-import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.LinkisJobInfo
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.jobInfo.LinkisJobInfo
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager.LinkisJobLaunchManager.LINKIS_JAR_VERSION_PATTERN
import org.apache.commons.io.IOUtils
import org.apache.commons.lang3.StringUtils
@@ -17,6 +17,7 @@ import scala.collection.JavaConverters._
import scala.util.matching.Regex
trait LinkisJobLaunchManager extends JobLaunchManager[LinkisJobInfo] with Logging{
+
/**
* This method is used to launch a new job.
*
@@ -40,6 +41,7 @@ trait LinkisJobLaunchManager extends JobLaunchManager[LinkisJobInfo] with Loggin
linkisVersion = version
}
}
+
if (StringUtils.isNotBlank(linkisVersion)){
val versionSplitter: Array[String] = linkisVersion.split("\\.")
val major = Integer.valueOf(versionSplitter(0))
@@ -62,13 +64,13 @@ trait LinkisJobLaunchManager extends JobLaunchManager[LinkisJobInfo] with Loggin
innerLaunch(job, jobState)
}
- private def changeUnitOfMemoryToG(params: util.Map[String, Any], name: String): Unit = {
+ private def changeUnitOfMemoryToG(params: util.Map[String, AnyRef], name: String): Unit = {
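+  // Converts a startup memory value from MB to GB (integer division, with a minimum of 1G), e.g. "4096" => "4"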
params.get(name) match {
case memory: String =>
var actualMem = Integer.valueOf(memory) / 1024
actualMem = if (actualMem <= 0) 1 else actualMem
info(s"Change the unit of startup param: [${name}], value [${memory}] => [${actualMem}]")
- params.put(name, actualMem)
+ params.put(name, actualMem.toString)
case _ => // Ignores
}
}
@@ -78,7 +80,7 @@ trait LinkisJobLaunchManager extends JobLaunchManager[LinkisJobInfo] with Loggin
* @param params params
* @param prefix prefix
*/
- private def avoidParamsPrefix(params: util.Map[String, Any], prefix: String): util.Map[String, Any] = {
+ private def avoidParamsPrefix(params: util.Map[String, AnyRef], prefix: String): util.Map[String, AnyRef] = {
params.asScala.map{
case (key, value) =>
if (key.startsWith(prefix)){
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/SimpleFlinkJobLaunchManager.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/SimpleFlinkJobLaunchManager.scala
index 6ad2e4f88..7b47bc704 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/SimpleFlinkJobLaunchManager.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/manager/SimpleFlinkJobLaunchManager.scala
@@ -15,17 +15,26 @@
package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager
+import com.webank.wedatasphere.streamis.jobmanager.launcher.enums.JobClientType
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.constants.JobConstants
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.errorcode.JobLaunchErrorCode
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.{JobState, JobStateInfo}
-import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{JobClient, LaunchJob}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{JobClient, JobInfo, LaunchJob}
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration
-import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.{FlinkJobClient, FlinkJobInfo, LinkisJobInfo}
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager.SimpleFlinkJobLaunchManager.INSTANCE_NAME
-import org.apache.commons.lang.StringEscapeUtils
-import org.apache.linkis.common.utils.{RetryHandler, Utils}
-import org.apache.linkis.computation.client.once.simple.{SimpleOnceJob, SubmittableSimpleOnceJob}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.jobInfo.LinkisJobInfo
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.jobInfo.EngineConnJobInfo
+import org.apache.commons.lang3.{StringEscapeUtils, StringUtils}
+import org.apache.linkis.common.ServiceInstance
+import org.apache.linkis.common.exception.LinkisRetryException
+import org.apache.linkis.common.utils.{JsonUtils, RetryHandler, Utils}
+import org.apache.linkis.computation.client.once.action.GetEngineConnAction
+import org.apache.linkis.computation.client.once.simple.{SimpleOnceJob, SimpleOnceJobBuilder, SubmittableSimpleOnceJob}
import org.apache.linkis.computation.client.once.{OnceJob, SubmittableOnceJob}
-import org.apache.linkis.computation.client.operator.impl.{EngineConnApplicationInfoOperator, EngineConnLogOperator}
+import org.apache.linkis.computation.client.operator.impl.EngineConnApplicationInfoOperator
+import org.apache.linkis.governance.common.constant.ec.ECConstants
import org.apache.linkis.httpclient.dws.DWSHttpClient
+import org.apache.linkis.protocol.utils.TaskUtils
import org.apache.linkis.ujes.client.exception.UJESJobException
import java.util
@@ -41,7 +50,7 @@ class SimpleFlinkJobLaunchManager extends FlinkJobLaunchManager {
if(job.getLaunchConfigs != null) {
job.getLaunchConfigs.asScala.get(LaunchJob.LAUNCH_CONFIG_CREATE_SERVICE).foreach{ case createService: String => builder.setCreateService(createService)}
job.getLaunchConfigs.asScala.get(LaunchJob.LAUNCH_CONFIG_DESCRIPTION).foreach{ case desc: String => builder.setDescription(desc)}
- job.getLaunchConfigs.asScala.get(LaunchJob.LAUNCH_CONFIG_MAX_SUBMIT_TIME).foreach{ case maxSubmitTime: Long => builder.setMaxSubmitTime(maxSubmitTime)}
+ job.getLaunchConfigs.asScala.get(LaunchJob.LAUNCH_CONFIG_MAX_SUBMIT_TIME).foreach{ case maxSubmitTime: String => builder.setMaxSubmitTime(maxSubmitTime.toLong)}
}
builder.build()
}
@@ -51,16 +60,23 @@ class SimpleFlinkJobLaunchManager extends FlinkJobLaunchManager {
override protected def createJobInfo(onceJob: SubmittableOnceJob, job: LaunchJob, jobState: JobState): LinkisJobInfo = {
val nodeInfo = onceJob.getNodeInfo
- val jobInfo = new FlinkJobInfo
+ val jobInfo = new EngineConnJobInfo
// Escape the job name
jobInfo.setName(StringEscapeUtils.escapeJava(job.getJobName))
jobInfo.setId(onceJob.getId)
jobInfo.setUser(job.getSubmitUser)
onceJob match {
- case simpleOnceJob: SubmittableSimpleOnceJob =>
- jobInfo.setECMInstance(simpleOnceJob.getECMServiceInstance)
+ case submittableSimpleOnceJob: SubmittableSimpleOnceJob =>
+ jobInfo.setEcInstance(submittableSimpleOnceJob.getEcServiceInstance)
+ jobInfo.setECMInstance(submittableSimpleOnceJob.getECMServiceInstance)
case _ =>
+ val typeStr = if (null == onceJob) "null" else onceJob.getClass.getName
+ logger.error(s"Invalid job type : ${typeStr}, only SubmittableSimpleOnceJob is supported")
}
+ val startupMap = TaskUtils.getStartupMap(job.getParams)
+ val managerMode = startupMap.getOrDefault(JobLauncherConfiguration.MANAGER_MODE_KEY.getValue, JobClientType.ATTACH.getName).toString.toLowerCase()
+ jobInfo.setClientType(managerMode)
+ logger.info(s"Job manager mode : ${managerMode}")
Utils.tryCatch(fetchApplicationInfo(onceJob, jobInfo)) { t =>
val message = s"Unable to fetch the application info of launched job [${job.getJobName}], maybe the engine has been shutdown"
error(message, t)
@@ -68,20 +84,44 @@ class SimpleFlinkJobLaunchManager extends FlinkJobLaunchManager {
jobInfo.setStatus("failed")
jobInfo.setCompletedMsg(message)
}
+ jobInfo.setJobParams(job.getParams.asInstanceOf[util.Map[String, Object]])
jobInfo.setResources(nodeInfo.get("nodeResource").asInstanceOf[util.Map[String, Object]])
// Set job state info into
-// Option(jobState).foreach(state => {
-// val stateInfo = new JobStateInfo
-// stateInfo.setTimestamp(state.getTimestamp)
-// stateInfo.setLocation(state.getLocation.toString)
-// jobInfo.setJobStates(Array(stateInfo))
-// })
+ Option(jobState).foreach(state => {
+ val stateInfo = new JobStateInfo(state.getLocation.toString, state.getTimestamp, state.isRestore)
+ jobInfo.setJobStates(Array(stateInfo))
+ })
jobInfo
}
- override protected def createJobInfo(jobInfo: String): LinkisJobInfo = DWSHttpClient.jacksonJson.readValue(jobInfo, classOf[FlinkJobInfo])
+ override protected def createJobInfo(jobInfo: String): LinkisJobInfo = {
+ if (StringUtils.isNotBlank(jobInfo)) {
+ DWSHttpClient.jacksonJson.readValue(jobInfo, classOf[EngineConnJobInfo])
+ } else {
+ null
+ }
+ }
- protected def fetchApplicationInfo(onceJob: OnceJob, jobInfo: FlinkJobInfo): Unit = {
+ protected def fetchApplicationInfo(onceJob: OnceJob, jobInfo: EngineConnJobInfo): Unit = {
+ val isDetach = JobClientType.DETACH.toString.equalsIgnoreCase(jobInfo.getClientType)
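+    // In detach mode the EngineConn may no longer serve operators, so read the application info
+    // from the EC metrics on LinkisManager; in attach mode use the EngineConnApplicationInfoOperator below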
+ if (isDetach) {
+ val retryHandler = new RetryHandler {}
+ retryHandler.setRetryNum(JobLauncherConfiguration.FLINK_FETCH_APPLICATION_INFO_MAX_TIMES.getValue)
+ retryHandler.setRetryMaxPeriod(5000)
+ retryHandler.setRetryPeriod(500)
+ retryHandler.addRetryException(classOf[UJESJobException])
+ retryHandler.addRetryException(classOf[LinkisRetryException])
+ retryHandler.addRetryException(classOf[NullPointerException])
+ var ecInstance: ServiceInstance = null
+ var ecTicketId: String = null
+ onceJob match {
+ case submittableSimpleOnceJob: SubmittableSimpleOnceJob=>
+ ecInstance = submittableSimpleOnceJob.getEcServiceInstance
+ ecTicketId = submittableSimpleOnceJob.getEcTicketId
+ case _ =>
+ }
+ retryHandler.retry(getEcMetrics(ecTicketId, ecInstance, jobInfo), "GetEcMetrics")
+ } else {
onceJob.getOperator(EngineConnApplicationInfoOperator.OPERATOR_NAME) match {
case applicationInfoOperator: EngineConnApplicationInfoOperator =>
val retryHandler = new RetryHandler {}
@@ -95,21 +135,38 @@ class SimpleFlinkJobLaunchManager extends FlinkJobLaunchManager {
}
}
- /**
- * Create job client
- *
- * @param onceJob once job
- * @param jobInfo job info
- * @return
- */
- override protected def createJobClient(onceJob: OnceJob, jobInfo: LinkisJobInfo): JobClient[LinkisJobInfo] = {
- jobInfo match {
- case flinkJobInfo: FlinkJobInfo =>
- new FlinkJobClient(onceJob, flinkJobInfo, this.jobStateManager).asInstanceOf[JobClient[LinkisJobInfo]]
- case _ => null
+ }
+
+
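+  /**
+   * Query LinkisManager for the EngineConn metrics and copy the yarn applicationId and
+   * applicationUrl into the job info; throws LinkisRetryException when the metrics carry
+   * no appId yet, so the surrounding RetryHandler will retry.
+   */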
+  def getEcMetrics(ecTicketId: String, ecInstance: ServiceInstance, jobInfo: EngineConnJobInfo): Unit = {
+    val getEngineConnAction = new GetEngineConnAction
+    getEngineConnAction.setUser("hadoop")
+    if (StringUtils.isNotBlank(ecTicketId)) {
+      getEngineConnAction.addRequestPayload(JobConstants.EC_TICKET_ID_KEY, ecTicketId)
+    }
+    if (null != ecInstance) {
+      getEngineConnAction.addRequestPayload(JobConstants.APP_NAME_KEY, ecInstance.getApplicationName)
+      getEngineConnAction.addRequestPayload(JobConstants.INSTANCE_KEY, ecInstance.getInstance)
+    }
+    val rs = SimpleOnceJobBuilder.getLinkisManagerClient.getEngineConn(getEngineConnAction)
+ val metricsStr = rs.getData.getOrDefault(JobConstants.RESULT_EC_METRICS_KEY, "{}")
+ val metricsMap = if (null != metricsStr) {
+ JsonUtils.jackson.readValue(metricsStr.toString, classOf[util.Map[String, AnyRef]])
+ } else {
+ logger.warn("metrics: \n" + JsonUtils.jackson.writeValueAsString(rs))
+ throw new LinkisRetryException(JobLaunchErrorCode.JOB_EC_METRICS_ERROR, "Got null metrics.")
}
+ val applicationId = if (metricsMap.containsKey(ECConstants.YARN_APPID_NAME_KEY)) {
+ metricsMap.get(ECConstants.YARN_APPID_NAME_KEY)
+ } else {
+ logger.warn("metrics: \n" + JsonUtils.jackson.writeValueAsString(rs))
+ throw new LinkisRetryException(JobLaunchErrorCode.JOB_EC_METRICS_ERROR, "Got no appId.")
+ }
+ jobInfo.setApplicationId(applicationId.toString)
+ jobInfo.setApplicationUrl(metricsMap.getOrDefault(ECConstants.YARN_APP_URL_KEY, "").toString)
}
+
/**
* Init method
*/
@@ -129,5 +186,5 @@ class SimpleFlinkJobLaunchManager extends FlinkJobLaunchManager {
}
object SimpleFlinkJobLaunchManager{
- val INSTANCE_NAME = "simpleFlink";
+ val INSTANCE_NAME = "flink";
}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkClientLogOperator.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkClientLogOperator.scala
new file mode 100644
index 000000000..a41018a74
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkClientLogOperator.scala
@@ -0,0 +1,34 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.operator
+
+import org.apache.commons.lang3.StringUtils
+import org.apache.linkis.computation.client.once.action.EngineConnOperateAction
+import org.apache.linkis.computation.client.operator.impl.{EngineConnLogOperator, EngineConnLogs}
+
+/**
+ * Append "logDirSuffix" parameter
+ */
+class FlinkClientLogOperator extends EngineConnLogOperator {
+
+ private var logDirSuffix: String = _
+
+ def setLogDirSuffix(logDirSuffix: String): Unit = {
+ this.logDirSuffix = logDirSuffix
+ }
+
+ protected override def addParameters(builder: EngineConnOperateAction.Builder): Unit = {
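+    // Keep the generic engineConnLog operator name in the request payload and pass logDirSuffix
+    // alongside it; getName below still reports the flink-specific operator name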
+ builder.operatorName(EngineConnLogOperator.OPERATOR_NAME)
+ if (StringUtils.isNotBlank(this.logDirSuffix)) {
+ builder.addParameter("logDirSuffix", logDirSuffix)
+ }
+ super.addParameters(builder)
+ }
+
+
+ override def getTicketId: String = super.getTicketId
+
+ override def getName: String = FlinkClientLogOperator.OPERATOR_NAME
+}
+
+object FlinkClientLogOperator {
+ val OPERATOR_NAME = "engineConnLog_flink"
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkTriggerSavepointOperator.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkTriggerSavepointOperator.scala
index 993847836..e00628eb4 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkTriggerSavepointOperator.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkTriggerSavepointOperator.scala
@@ -15,7 +15,8 @@
package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.operator
-import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.Savepoint
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.constants.JobConstants
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.FlinkSavepoint
import org.apache.linkis.computation.client.once.action.EngineConnOperateAction
import org.apache.linkis.computation.client.once.result.EngineConnOperateResult
import org.apache.linkis.computation.client.operator.OnceJobOperator
@@ -23,7 +24,7 @@ import org.apache.linkis.computation.client.operator.OnceJobOperator
/**
* Flink trigger savepoint operator
*/
-class FlinkTriggerSavepointOperator extends OnceJobOperator[Savepoint]{
+class FlinkTriggerSavepointOperator extends OnceJobOperator[FlinkSavepoint]{
/**
* Save point directory
@@ -35,6 +36,12 @@ class FlinkTriggerSavepointOperator extends OnceJobOperator[Savepoint]{
*/
private var mode: String = _
+  /**
+   * Yarn application id
+   */
+ private var appId: String = _
+
def setSavepointDir(savepointDir: String): Unit ={
this.savepointDir = savepointDir
}
@@ -43,15 +50,20 @@ class FlinkTriggerSavepointOperator extends OnceJobOperator[Savepoint]{
this.mode = mode
}
+ def setApplicationId(appId: String): Unit = {
+ this.appId = appId
+ }
+
override protected def addParameters(builder: EngineConnOperateAction.Builder): Unit = {
builder.addParameter("savepointPath", savepointDir)
builder.addParameter("mode", mode)
+ builder.addParameter(JobConstants.APPLICATION_ID_KEY, appId)
}
- override protected def resultToObject(result: EngineConnOperateResult): Savepoint = {
+ override protected def resultToObject(result: EngineConnOperateResult): FlinkSavepoint = {
val savepointPath:String = result.getAs("writtenSavepoint")
info(s"Get the savepoint store path: [$savepointPath] form ${FlinkTriggerSavepointOperator.OPERATOR_NAME} operation")
- new Savepoint(savepointPath)
+ new FlinkSavepoint(savepointPath)
}
override def getName: String = FlinkTriggerSavepointOperator.OPERATOR_NAME
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkYarnLogOperator.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkYarnLogOperator.scala
index a24e12580..975b23405 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkYarnLogOperator.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/operator/FlinkYarnLogOperator.scala
@@ -19,9 +19,9 @@ import org.apache.linkis.computation.client.once.action.EngineConnOperateAction
import org.apache.linkis.computation.client.operator.impl.EngineConnLogOperator
/**
- * Extend the engine conn log operator
+ * Extend the flink client log operator
*/
-class FlinkYarnLogOperator extends EngineConnLogOperator{
+class FlinkYarnLogOperator extends FlinkClientLogOperator {
private var applicationId: String = _
@@ -30,8 +30,9 @@ class FlinkYarnLogOperator extends EngineConnLogOperator{
}
protected override def addParameters(builder: EngineConnOperateAction.Builder): Unit = {
- builder.addParameter("yarnApplicationId", this.applicationId)
super.addParameters(builder)
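+    // super fills in logDirSuffix and the generic operator name; override the name with this
+    // operator's own and attach the yarn application id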
+ builder.operatorName(getName)
+ builder.addParameter("yarnApplicationId", this.applicationId)
}
override def getName: String = FlinkYarnLogOperator.OPERATOR_NAME
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/Checkpoint.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/FlinkCheckpoint.scala
similarity index 91%
rename from streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/Checkpoint.scala
rename to streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/FlinkCheckpoint.scala
index a5e42e599..6193ae7a5 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/Checkpoint.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/FlinkCheckpoint.scala
@@ -15,12 +15,12 @@
package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state
-import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.{JobGenericState, JobState}
/**
* Hold the check point information
*/
-class Checkpoint(location: String) extends GenericFlinkJobState(location) with JobState {
+class FlinkCheckpoint(location: String) extends JobGenericState(location) with JobState {
/**
* Record the sequence of checkpoint
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/Savepoint.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/FlinkSavepoint.scala
similarity index 88%
rename from streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/Savepoint.scala
rename to streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/FlinkSavepoint.scala
index fd91292c6..a2b9e930f 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/Savepoint.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/FlinkSavepoint.scala
@@ -15,8 +15,8 @@
package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state
-import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.{JobGenericState, JobState}
-class Savepoint(location: String) extends GenericFlinkJobState(location) with JobState {
+class FlinkSavepoint(location: String) extends JobGenericState(location) with JobState {
}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/GenericFlinkJobState.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/GenericFlinkJobState.scala
deleted file mode 100644
index 32aa1bc86..000000000
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/job/state/GenericFlinkJobState.scala
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright 2021 WeBank
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state
-
-import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState
-
-import java.net.URI
-
-/**
- * Generic flink job state
- */
-class GenericFlinkJobState(location: String) extends JobState{
-
- private var timestamp: Long = -1
-
- private var id: String = "{ID}"
-
- private var metadataInfo: Any = _
-
- override def getLocation: URI = URI.create(location)
-
- override def getMetadataInfo: Any = {
- metadataInfo
- }
-
- def setMetadataInfo(metadataInfo: Any): Unit = {
- this.metadataInfo = metadataInfo
- }
-
- /**
- * Job state id
- *
- * @return
- */
- override def getId: String = id
-
- def setId(id: String): Unit = {
- this.id = id
- }
- /**
- * Timestamp to save the state
- *
- * @return
- */
- override def getTimestamp: Long = timestamp
-
- def setTimestamp(timestamp: Long): Unit = {
- this.timestamp = timestamp
- }
-}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/utils/LinkisClientUtils.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/utils/LinkisClientUtils.scala
new file mode 100644
index 000000000..0430b0fd6
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-linkis/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/linkis/utils/LinkisClientUtils.scala
@@ -0,0 +1,28 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.utils
+
+import org.apache.linkis.computation.client.once.{LinkisManagerClient, LinkisManagerClientImpl}
+import org.apache.linkis.computation.client.once.simple.SimpleOnceJobBuilder
+import org.apache.linkis.httpclient.dws.DWSHttpClient
+
+object LinkisClientUtils {
+
+ private var linkisClient: DWSHttpClient = _
+
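+  /**
+   * Lazily obtain the DWSHttpClient hidden inside LinkisManagerClientImpl; the private
+   * "dwsHttpClient" field is read via reflection under double-checked locking.
+   */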
+ def getLinkisDwsClient: DWSHttpClient = {
+ if (null == linkisClient) {
+ this.synchronized {
+ if (null == linkisClient) {
+ linkisClient = SimpleOnceJobBuilder.getLinkisManagerClient match {
+ case client: LinkisManagerClient =>
+ val dwsClientField = classOf[LinkisManagerClientImpl].getDeclaredField("dwsHttpClient")
+ dwsClientField.setAccessible(true)
+ dwsClientField.get(client).asInstanceOf[DWSHttpClient]
+ case _ => null
+ }
+ }
+ }
+ }
+ linkisClient
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/pom.xml b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/pom.xml
index 545888044..d0151ddeb 100755
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/pom.xml
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/pom.xml
@@ -20,7 +20,7 @@
        <artifactId>streamis-jobmanager</artifactId>
        <groupId>com.webank.wedatasphere.streamis</groupId>
-        <version>0.2.0</version>
+        <version>0.3.0</version>
        <relativePath>../../pom.xml</relativePath>
    </parent>
    <modelVersion>4.0.0</modelVersion>
@@ -33,6 +33,11 @@
+        <dependency>
+            <groupId>com.webank.wedatasphere.streamis</groupId>
+            <artifactId>streamis-job-launcher-linkis</artifactId>
+            <version>${jobmanager.version}</version>
+        </dependency>
        <dependency>
            <groupId>org.apache.linkis</groupId>
            <artifactId>linkis-mybatis</artifactId>
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/JobLauncherAutoConfiguration.java b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/JobLauncherAutoConfiguration.java
index 58b781b1c..b8bdbf06f 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/JobLauncherAutoConfiguration.java
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/JobLauncherAutoConfiguration.java
@@ -28,6 +28,7 @@
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
+import java.util.Locale;
import java.util.Objects;
@Configuration
@@ -37,8 +38,8 @@ public class JobLauncherAutoConfiguration {
public static final String DEFAULT_JOB_LAUNCH_MANGER = SimpleFlinkJobLaunchManager$.MODULE$.INSTANCE_NAME();
- @Bean(initMethod = "init", destroyMethod = "destroy")
- @ConditionalOnMissingBean(JobLaunchManager.class)
+ @Bean
+ @ConditionalOnMissingBean
@SuppressWarnings("unchecked")
    public JobLaunchManager<? extends JobInfo> defaultJobLaunchManager(){
// First to scan the available job launch manager
@@ -53,12 +54,18 @@ public JobLaunchManager extends JobInfo> defaultJobLaunchManager(){
if (Objects.nonNull(constructor)){
try {
                    JobLaunchManager<? extends JobInfo> launchManager = (JobLaunchManager<? extends JobInfo>) constructor.newInstance();
- JobLaunchManager$.MODULE$.registerJobManager(launchManager.getName(), launchManager);
+ // Init launch Manager
+ launchManager.init();
+ JobLaunchManager$.MODULE$.registerJobManager(launchManager.getName().toLowerCase(Locale.ROOT), launchManager);
} catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
LOG.warn("Unable to instance the job launch manager: [{}]", clazz.getCanonicalName(), e);
}
}
});
+ // Add shutdown hook to destroy the launch manager
+ Runtime.getRuntime().addShutdownHook(new Thread(() ->
+ JobLaunchManager$.MODULE$.getJobManagers().forEach(JobLaunchManager::destroy)
+ ));
// Use the flink job launch manager as default
        JobLaunchManager<? extends JobInfo> defaultManager = JobLaunchManager$.MODULE$.getJobManager(DEFAULT_JOB_LAUNCH_MANGER);
if (Objects.isNull(defaultManager)){
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/dao/impl/StreamJobConfMapper.xml b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/dao/impl/StreamJobConfMapper.xml
index fc5427102..94556b3f9 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/dao/impl/StreamJobConfMapper.xml
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/launcher/dao/impl/StreamJobConfMapper.xml
@@ -60,7 +60,7 @@
- DELETE c FROM `linkis_stream_job_config` c INNER JOIN `linkis_stream_job_config_def` d ON c.job_id = 0 AND d.id = c.ref_def_id AND d.is_temp = 1;
+ DELETE c FROM `linkis_stream_job_config` c INNER JOIN `linkis_stream_job_config_def` d ON c.job_id = #{jobId} AND d.id = c.ref_def_id AND d.is_temp = 1;
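+        <!-- Bind the actual jobId instead of the hardcoded 0 so only this job's temporary config rows are deleted -->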
INSERT INTO `linkis_stream_job_config`(`job_id`, `job_name`, `key`, `value`, `ref_def_id`) VALUES
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/conf/JobConfKeyConstants.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/conf/JobConfKeyConstants.scala
index f756daecc..75ad7d215 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/conf/JobConfKeyConstants.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/conf/JobConfKeyConstants.scala
@@ -16,6 +16,7 @@
package com.webank.wedatasphere.streamis.jobmanager.launcher.conf
import org.apache.linkis.common.conf.CommonVars
+import org.apache.linkis.governance.common.conf.GovernanceCommonConf
/**
@@ -23,6 +24,10 @@ import org.apache.linkis.common.conf.CommonVars
*/
object JobConfKeyConstants {
+ /**
+ * Config group for streamis internal configuration
+ */
+ val GROUP_INTERNAL: CommonVars[String] = CommonVars("wds.streamis.job.internal.config.group", "wds.streamis.internal.params")
/**
* Group: Flink extra
*/
@@ -85,4 +90,11 @@ object JobConfKeyConstants {
* Alert level
*/
val ALERT_LEVEL: CommonVars[String] = CommonVars("wds.streamis.job.config.key.alert.level", "wds.linkis.flink.alert.level")
+
+ /**
+ * Material model
+ */
+ val MATERIAL_MODEL: CommonVars[String] = CommonVars("wds.streamis.job.config.key.material.model", "wds.streamis.job.material.model")
+
+ val MANAGE_MODE_KEY: CommonVars[String] = CommonVars("wds.streamis.job.manage.mode.key", GovernanceCommonConf.EC_APP_MANAGE_MODE.key)
}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/conf/JobConstants.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/conf/JobConstants.scala
new file mode 100644
index 000000000..148d9a1c9
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/conf/JobConstants.scala
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2021 WeBank
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.webank.wedatasphere.streamis.jobmanager.launcher.conf
+
+object JobConstants {
+
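+  // Manage modes for the engine conn (semantics inferred from SimpleFlinkJobLaunchManager):
+  // "attach" keeps the EngineConn attached to manage the Flink app; "detach" leaves the app
+  // running on yarn independently of the EngineConn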
+ val MANAGE_MODE_DETACH = "detach"
+
+ val MANAGE_MODE_ATTACH = "attach"
+
+
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/DefaultStreamJobConfService.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/DefaultStreamJobConfService.scala
index 7eaf7c8a5..21a453ae9 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/DefaultStreamJobConfService.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/DefaultStreamJobConfService.scala
@@ -41,7 +41,7 @@ class DefaultStreamJobConfService extends StreamJobConfService with Logging{
* @param valueMap value map
*/
@Transactional(rollbackFor = Array(classOf[Exception]))
- override def saveJobConfig(jobId: Long, valueMap: util.Map[String, Any]): Unit = {
+ override def saveJobConfig(jobId: Long, valueMap: util.Map[String, AnyRef]): Unit = {
val definitions = Option(this.streamJobConfMapper.loadAllDefinitions())
.getOrElse(new util.ArrayList[JobConfDefinition]())
// Can deserialize the value map at first
@@ -56,7 +56,7 @@ class DefaultStreamJobConfService extends StreamJobConfService with Logging{
* @param jobId job id
* @return
*/
- override def getJobConfig(jobId: Long): util.Map[String, Any] = {
+ override def getJobConfig(jobId: Long): util.Map[String, AnyRef] = {
getJobConfig(jobId, this.streamJobConfMapper.loadAllDefinitions())
}
@@ -80,7 +80,7 @@ class DefaultStreamJobConfService extends StreamJobConfService with Logging{
override def getJobConfValueSet(jobId: Long): JobConfValueSet = {
val valueSet = new JobConfValueSet
val definitions: util.List[JobConfDefinition] = this.streamJobConfMapper.loadAllDefinitions()
- val jobConfig: util.Map[String, Any] = getJobConfig(jobId, definitions)
+ val jobConfig: util.Map[String, AnyRef] = getJobConfig(jobId, definitions)
val definitionMap: util.Map[String, JobConfDefinition] = definitions.asScala.map(definition => (definition.getKey, definition)).toMap.asJava
valueSet.setResourceConfig(resolveConfigValueVo(JobConfKeyConstants.GROUP_RESOURCE.getValue, jobConfig, definitionMap))
valueSet.setParameterConfig(resolveConfigValueVo(JobConfKeyConstants.GROUP_FLINK_EXTRA.getValue, jobConfig, definitionMap))
@@ -135,9 +135,9 @@ class DefaultStreamJobConfService extends StreamJobConfService with Logging{
* @param definitions definitions
* @return
*/
- private def getJobConfig(jobId: Long, definitions: util.List[JobConfDefinition]): util.Map[String, Any] = {
+ private def getJobConfig(jobId: Long, definitions: util.List[JobConfDefinition]): util.Map[String, AnyRef] = {
Option(this.streamJobConfMapper.getConfValuesByJobId(jobId)) match {
- case None => new util.HashMap[String, Any]()
+ case None => new util.HashMap[String, AnyRef]()
case Some(list: util.List[JobConfValue]) =>
JobConfValueUtils.serialize(list,
Option(definitions)
@@ -187,10 +187,10 @@ class DefaultStreamJobConfService extends StreamJobConfService with Logging{
* @param jobConfig job config
* @param definitionMap (key => definition)
*/
- private def resolveConfigValueVo(group: String, jobConfig: util.Map[String, Any],
+ private def resolveConfigValueVo(group: String, jobConfig: util.Map[String, AnyRef],
definitionMap: util.Map[String, JobConfDefinition]): util.List[JobConfValueVo] = {
Option(jobConfig.get(group)) match {
- case Some(configMap: util.Map[String, Any]) =>
+ case Some(configMap: util.Map[String, AnyRef]) =>
configMap.asScala.map{
case (key, value) =>
val configValue = new JobConfValueVo(key, String.valueOf(value))
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/LinkisFlinkManagerECRefreshService.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/LinkisFlinkManagerECRefreshService.scala
new file mode 100644
index 000000000..33785564d
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/LinkisFlinkManagerECRefreshService.scala
@@ -0,0 +1,16 @@
+package com.webank.wedatasphere.streamis.jobmanager.launcher.service
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.client.LinkisFlinkManagerClient
+import org.springframework.stereotype.Service
+
+import javax.annotation.PostConstruct
+
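+/**
+ * Kicks off LinkisFlinkManagerClient's scheduled refresh task once the Spring context is up.
+ */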
+@Service
+class LinkisFlinkManagerECRefreshService {
+
+ @PostConstruct
+ def init(): Unit = {
+ LinkisFlinkManagerClient.initScheduledTask()
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/StreamJobConfService.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/StreamJobConfService.scala
index 0e87a9e4c..ff7038e63 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/StreamJobConfService.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/StreamJobConfService.scala
@@ -34,14 +34,14 @@ trait StreamJobConfService {
* @param jobId job id
* @param valueMap value map
*/
- def saveJobConfig(jobId: Long, valueMap: util.Map[String, Any]): Unit
+ def saveJobConfig(jobId: Long, valueMap: util.Map[String, AnyRef]): Unit
/**
* Query the job configuration
* @param jobId job id
* @return
*/
- def getJobConfig(jobId: Long): util.Map[String, Any]
+ def getJobConfig(jobId: Long): util.Map[String, AnyRef]
/**
* Query the job value
diff --git a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/tools/JobConfValueUtils.scala b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/tools/JobConfValueUtils.scala
index 1ba386b20..2640d4b3f 100644
--- a/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/tools/JobConfValueUtils.scala
+++ b/streamis-jobmanager/streamis-job-launcher/streamis-job-launcher-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/launcher/service/tools/JobConfValueUtils.scala
@@ -16,12 +16,14 @@
package com.webank.wedatasphere.streamis.jobmanager.launcher.service.tools
import com.webank.wedatasphere.streamis.jobmanager.launcher.entity.{JobConfDefinition, JobConfValue}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.exception.ConfigurationException
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.utils.JobUtils
import org.apache.commons.lang.StringUtils
import scala.collection.JavaConverters._
import java.util
/**
- * TODO dive into JobConfValueSerializer and JobConfValueDeserializer
+ * Serialization/deserialization utilities for job configuration values
+ * (see JobConfValueSerializer and JobConfValueDeserializer)
*/
class JobConfValueUtils {
@@ -32,20 +34,24 @@ object JobConfValueUtils{
* Serialize the job conf values
* @return
*/
- def serialize(configValues: util.List[JobConfValue], definitions: util.List[JobConfDefinition]): util.Map[String, Any] = {
+ def serialize(configValues: util.List[JobConfValue], definitions: util.List[JobConfDefinition]): util.Map[String, AnyRef] = {
// First to build a definition map
val definitionMap: util.Map[String, JobConfDefinition] = definitions.asScala.map(definition => {
(definition.getId.toString, definition)
}).toMap.asJava
// Init a value map to store relation of config values
- val relationMap: util.Map[String, Any] = new util.HashMap[String, Any]()
+ val relationMap: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef]()
configValues.asScala.foreach(keyValue => {
val refDefId = keyValue.getReferDefId
if (null != refDefId) {
Option(relationMap.get(refDefId.toString)) match {
- case Some(value: util.Map[String, Any]) => {
+ case Some(value: util.Map[String, AnyRef]) => {
// Put the value into relation
- value.put(keyValue.getKey, keyValue.getValue)
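+            // AnyVal (primitive) values are stored as strings so they fit util.Map[String, AnyRef]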
+ if (JobUtils.isAnyVal(keyValue.getValue)) {
+ value.put(keyValue.getKey, keyValue.getValue.toString)
+ } else {
+ value.put(keyValue.getKey, keyValue.getValue)
+ }
}
case Some(value: String) => {
// Overwrite it's value
@@ -56,32 +62,44 @@ object JobConfValueUtils{
var definition = definitionMap.get(refDefId.toString)
var value: Any = if (null != definition && (StringUtils.isBlank(definition.getType) ||
definition.getType.equalsIgnoreCase("NONE"))) {
- val relation = new util.HashMap[String, Any]()
- relation.put(keyValue.getKey, keyValue.getValue)
- relation
+ val relation = new util.HashMap[String, AnyRef]()
+ if (JobUtils.isAnyVal(keyValue.getValue)) {
+ relation.put(keyValue.getKey, keyValue.getValue.toString)
+ } else {
+ relation.put(keyValue.getKey, keyValue.getValue)
+ }
+ relation
} else {
keyValue.getValue
}
while (null != definition){
value = Option(relationMap.get(definition.getId.toString)) match {
- case Some(existV: util.Map[String, Any]) => {
+ case Some(existV: util.Map[String, AnyRef]) => {
value match {
- case map: util.Map[String, Any] =>
+ case map: util.Map[String, AnyRef] =>
existV.putAll(map)
existV
- case _ =>
- relationMap.put(definition.getId.toString, value)
+ case _ : AnyRef =>
+ relationMap.put(definition.getId.toString, value.asInstanceOf[AnyRef])
value
+ case _ =>
+ throw new ConfigurationException(s"Value : ${value} is not supported, not AnyRef")
}
}
- case _ =>
- relationMap.put(definition.getId.toString, value)
+ case _: AnyRef =>
+ relationMap.put(definition.getId.toString, value.asInstanceOf[AnyRef])
value
- }
- Option(definition.getParentRef) match {
+ case _ =>
+ throw new ConfigurationException(s"Value : ${value} is not supported, not AnyRef")
+ }
+ Option(definition.getParentRef) match {
case Some(parentRef) =>
- val newValue: util.Map[String, Any] = new util.HashMap[String, Any]()
- newValue.put(definition.getKey, value)
+ val newValue: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef]()
+ if (JobUtils.isAnyVal(value)) {
+ newValue.put(definition.getKey, value.toString)
+ } else {
+ newValue.put(definition.getKey, value.asInstanceOf[AnyRef])
+ }
definition = definitionMap.get(parentRef.toString)
value = newValue
case _ => definition = null
@@ -104,7 +122,7 @@ object JobConfValueUtils{
* @param definitions definitions
* @return
*/
- def deserialize(valueMap: util.Map[String, Any], definitions: util.List[JobConfDefinition]):util.List[JobConfValue] = {
+ def deserialize(valueMap: util.Map[String, AnyRef], definitions: util.List[JobConfDefinition]):util.List[JobConfValue] = {
// First to build a definition map
val definitionMap: util.Map[String, JobConfDefinition] = definitions.asScala.map(definition => {
(definition.getKey, definition)
@@ -116,6 +134,7 @@ object JobConfValueUtils{
Option(definitionMap.get(key)) match {
case Some(definition) => if (definition.getLevel == 0){
configValues.addAll(deserializeInnerObj(key, value, null, definitionMap))
+ definition.setMark(true)
}
case _ =>
}
@@ -124,12 +143,12 @@ object JobConfValueUtils{
configValues
}
- private def deserializeInnerObj(key: String, value: Any, parentRef: String,
+ private def deserializeInnerObj(key: String, value: AnyRef, parentRef: String,
definitionMap: util.Map[String, JobConfDefinition]): util.List[JobConfValue] = {
val result: util.List[JobConfValue] = new util.ArrayList[JobConfValue]()
if (null != value) {
value match {
- case innerMap: util.Map[String, Any] =>
+ case innerMap: util.Map[String, AnyRef] =>
Option(definitionMap.get(key)) match {
case Some(definition) =>
innerMap.asScala.foreach{
@@ -142,6 +161,8 @@ object JobConfValueUtils{
result.addAll(childResult)
}
}
+ // Mark it used
+ definition.setMark(true)
case _ => //ignore
}
@@ -150,6 +171,8 @@ object JobConfValueUtils{
case Some(definition) =>
if (StringUtils.isBlank(parentRef) || parentRef.equals(String.valueOf(definition.getParentRef))){
result.add(new JobConfValue(key, String.valueOf(other), definition.getId))
+ // Mark it used
+ definition.setMark(true)
}
case _ => result.add(new JobConfValue(key, String.valueOf(other), null))
}
@@ -157,19 +180,5 @@ object JobConfValueUtils{
}
result
}
-// def main(args: Array[String]): Unit = {
-// val definitions: util.List[JobConfDefinition] = new util.ArrayList[JobConfDefinition]()
-// val configValues: util.List[JobConfValue] = new util.ArrayList[JobConfValue]()
-// definitions.add(new JobConfDefinition(0, "wds.linkis.flink.resource", "None", null, 0))
-// definitions.add(new JobConfDefinition(1, "wds.linkis.flink.custom", "None", null, 0))
-// definitions.add(new JobConfDefinition(2, "wds.linkis.flink.taskmanager.num", "NUMBER", 0, 1))
-// definitions.add(new JobConfDefinition(3, "wds.linkis.flink.jobmanager.memeory", "NUMBER", 0, 1))
-// configValues.add(new JobConfValue("wds.linkis.flink.taskmanager.num", "1", 2))
-// configValues.add(new JobConfValue("env.java.opts", "-DHADOOP_USER_NAME=hadoop", 1))
-// configValues.add(new JobConfValue("security.kerberos.login.principal", "hadoop@WEBANK.com", 1))
-// configValues.add(new JobConfValue("wds.linkis.flink.jobmanager.memeory", "1024", 3))
-// val result = serialize(configValues, definitions)
-// println(DWSHttpClient.jacksonJson.writeValueAsString(result))
-// println(DWSHttpClient.jacksonJson.writeValueAsString(deserialize(result, definitions)))
-// }
+
}
diff --git a/streamis-jobmanager/streamis-job-log/flink-streamis-log-collector/target/classes/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired b/streamis-jobmanager/streamis-job-log/flink-streamis-log-collector/target/classes/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired
new file mode 100644
index 000000000..dc13253b7
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/flink-streamis-log-collector/target/classes/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired
@@ -0,0 +1 @@
+com.webank.wedatasphere.streamis.jobmanager.log.collector.flink.FlinkStreamisConfigAutowired
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/pom.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/pom.xml
new file mode 100644
index 000000000..65a9e49f1
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/pom.xml
@@ -0,0 +1,96 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>streamis-job-log</artifactId>
+        <groupId>com.webank.wedatasphere.streamis</groupId>
+        <version>0.3.0</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>flink-streamis-log-collector</artifactId>
+
+    <properties>
+        <maven.compiler.source>8</maven.compiler.source>
+        <maven.compiler.target>8</maven.compiler.target>
+        <flink.version>1.12.2</flink.version>
+        <log4j.version>2.17.1</log4j.version>
+        <slf4j.version>1.7.15</slf4j.version>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.webank.wedatasphere.streamis</groupId>
+            <artifactId>streamis-job-log-collector</artifactId>
+            <version>${streamis.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-java</artifactId>
+            <version>${flink.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-yarn_2.11</artifactId>
+            <version>${flink.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>${junit.version}</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+            <version>${slf4j.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.logging.log4j</groupId>
+            <artifactId>log4j-slf4j-impl</artifactId>
+            <version>${log4j.version}</version>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.logging.log4j</groupId>
+            <artifactId>log4j-api</artifactId>
+            <version>${log4j.version}</version>
+            <scope>provided</scope>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <version>2.3</version>
+                <executions>
+                    <execution>
+                        <id>assemble</id>
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                        <phase>install</phase>
+                    </execution>
+                </executions>
+                <configuration>
+                    <descriptors>
+                        <descriptor>src/main/assembly/package.xml</descriptor>
+                    </descriptors>
+                    <appendAssemblyId>false</appendAssemblyId>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+</project>
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/assembly/package.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/assembly/package.xml
new file mode 100644
index 000000000..8da27bf2c
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/assembly/package.xml
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<assembly xmlns="http://maven.apache.org/ASSEMBLY/2.0.0"
+          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+          xsi:schemaLocation="http://maven.apache.org/ASSEMBLY/2.0.0 http://maven.apache.org/xsd/assembly-2.0.0.xsd">
+    <id>package</id>
+    <formats>
+        <format>jar</format>
+    </formats>
+    <includeBaseDirectory>false</includeBaseDirectory>
+    <dependencySets>
+        <dependencySet>
+            <outputDirectory>/</outputDirectory>
+            <useProjectArtifact>true</useProjectArtifact>
+            <scope>runtime</scope>
+        </dependencySet>
+    </dependencySets>
+</assembly>
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkStreamisConfigAutowired.java b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkStreamisConfigAutowired.java
new file mode 100644
index 000000000..046694c57
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkStreamisConfigAutowired.java
@@ -0,0 +1,126 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.flink;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.StreamisLogAppenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.StreamisLog4j2AppenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.filters.KeywordThresholdFilter;
+import com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.configuration.GlobalConfiguration;
+import org.apache.flink.runtime.util.EnvironmentInformation;
+import org.apache.flink.yarn.configuration.YarnConfigOptions;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.core.Filter;
+import org.apache.logging.log4j.core.filter.LevelMatchFilter;
+import org.apache.logging.log4j.core.filter.RegexFilter;
+import org.apache.logging.log4j.core.filter.ThresholdFilter;
+
+import java.util.Enumeration;
+import java.util.List;
+import java.util.Properties;
+
+import static com.webank.wedatasphere.streamis.jobmanager.log.collector.flink.FlinkStreamisConfigDefine.*;
+
+/**
+ * Autoconfigure the streamis config in the Flink environment
+ */
+public class FlinkStreamisConfigAutowired implements StreamisConfigAutowired {
+
+ /**
+ * Flink configuration
+ */
+ private Configuration configuration;
+
+ public FlinkStreamisConfigAutowired(){
+ // First to load configuration
+ // We may need to sleep and wait until the flink-conf.yaml has been fully appended
+ }
+ @Override
+ public StreamisLogAppenderConfig logAppenderConfig(StreamisLogAppenderConfig.Builder builder) throws IllegalAccessException {
+ this.configuration = loadConfiguration();
+ String applicationName =
+ this.configuration.getString(YarnConfigOptions.APPLICATION_NAME);
+ if (StringUtils.isNotBlank(applicationName)){
+ builder.setAppName(applicationName);
+ }
+ String gateway = this.configuration.getString(LOG_GATEWAY_ADDRESS);
+ if (StringUtils.isNotBlank(gateway)){
+ if (gateway.endsWith("/")){
+ gateway = gateway.substring(0, gateway.length() - 1);
+ }
+ gateway += this.configuration.getString(LOG_COLLECT_PATH, "/");
+ builder.setRpcAddress(gateway);
+ }
+ if (builder instanceof StreamisLog4j2AppenderConfig.Builder) {
+ List<String> filterStrategies = this.configuration.get(LOG_FILTER_STRATEGIES);
+ for (String filterStrategy : filterStrategies) {
+ if ("LevelMatch".equals(filterStrategy)) {
+ ((StreamisLog4j2AppenderConfig.Builder)builder).withFilter(LevelMatchFilter.newBuilder().setOnMatch(Filter.Result.ACCEPT).setOnMismatch(Filter.Result.DENY)
+ .setLevel(Level.getLevel(this.configuration.getString(LOG_FILTER_LEVEL_MATCH))).build());
+ } else if ("ThresholdMatch".equals(filterStrategy)) {
+ ((StreamisLog4j2AppenderConfig.Builder)builder).withFilter(ThresholdFilter.createFilter(Level
+ .getLevel(this.configuration.getString(LOG_FILTER_THRESHOLD_MATCH)), Filter.Result.ACCEPT, Filter.Result.DENY));
+ } else if ("RegexMatch".equals(filterStrategy)) {
+ ((StreamisLog4j2AppenderConfig.Builder)builder).withFilter(RegexFilter.createFilter(this.configuration.getString(LOG_FILTER_REGEX),
+ null, true, Filter.Result.ACCEPT, Filter.Result.DENY));
+ } else if ("Keyword".equals(filterStrategy)){
+ ((StreamisLog4j2AppenderConfig.Builder)builder).withFilter(
+ new KeywordThresholdFilter(
+ StringUtils.split(this.configuration.getString(LOG_FILTER_KEYWORDS), ","),
+ StringUtils.split(this.configuration.getString(LOG_FILTER_KEYWORDS_EXCLUDE), ",")));
+ }
+ }
+ }
+ String hadoopUser = EnvironmentInformation.getHadoopUser();
+ if (hadoopUser.equals("") || hadoopUser.equals("")){
+ hadoopUser = System.getProperty("user.name");
+ }
+ return builder.setDebugMode(this.configuration.getBoolean(DEBUG_MODE))
+ .setRpcConnTimeout(this.configuration.getInteger(LOG_RPC_CONN_TIMEOUT))
+ .setRpcSocketTimeout(this.configuration.getInteger(LOG_RPC_SOCKET_TIMEOUT))
+ .setRpcSendRetryCnt(this.configuration.getInteger(LOG_RPC_SEND_RETRY_COUNT))
+ .setRpcServerRecoveryTimeInSec(this.configuration.getInteger(LOG_RPC_SERVER_RECOVERY_TIME))
+ .setRpcMaxDelayTimeInSec(this.configuration.getInteger(LOG_RPC_MAX_DELAY_TIME))
+ .setRpcAuthTokenCodeKey(this.configuration.getString(LOG_RPC_AUTH_TOKEN_CODE_KEY))
+ .setRpcAuthTokenUserKey(this.configuration.getString(LOG_RPC_AUTH_TOKEN_USER_KEY))
+ .setRpcAuthTokenCode(this.configuration.getString(LOG_RPC_AUTH_TOKEN_CODE))
+ .setRpcAuthTokenUser(this.configuration.getString(LOG_RPC_AUTH_TOKEN_USER,
+ hadoopUser))
+ .setRpcCacheSize(this.configuration.getInteger(LOG_RPC_CACHE_SIZE))
+ .setRpcCacheMaxConsumeThread(this.configuration.getInteger(LOG_PRC_CACHE_MAX_CONSUME_THREAD))
+ .setDiscard(this.configuration.getBoolean(LOG_RPC_CACHE_DISCARD))
+ .setDiscardWindow(this.configuration.getInteger(LOG_RPC_CACHE_DISCARD_WINDOW))
+ .setRpcBufferSize(this.configuration.getInteger(LOG_RPC_BUFFER_SIZE))
+ .setRpcBufferExpireTimeInSec(this.configuration.getInteger(LOG_RPC_BUFFER_EXPIRE_TIME)).build();
+ }
+
+
+ /**
+ * According to :
+ * String launchCommand =
+ * BootstrapTools.getTaskManagerShellCommand(
+ * flinkConfig,
+ * tmParams,
+ * ".",
+ * ApplicationConstants.LOG_DIR_EXPANSION_VAR,
+ * hasLogback,
+ * hasLog4j,
+ * hasKrb5,
+ * taskManagerMainClass,
+ * taskManagerDynamicProperties);
+ * the configuration directory of Flink yarn container is always ".",
+ * @return configuration
+ */
+ private synchronized Configuration loadConfiguration(){
+ String configDir = ".";
+ Properties properties = System.getProperties();
+ Enumeration<?> enumeration = properties.propertyNames();
+ Configuration dynamicConfiguration = new Configuration();
+ while(enumeration.hasMoreElements()){
+ String prop = String.valueOf(enumeration.nextElement());
+ dynamicConfiguration.setString(prop, properties.getProperty(prop));
+ }
+ return GlobalConfiguration.loadConfiguration(configDir, dynamicConfiguration);
+ }
+
+}
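For reference, loadConfiguration() layers the JVM system properties (Flink dynamic properties) on top of the flink-conf.yaml found in the working directory, and logAppenderConfig() joins the gateway address with the collect path. A minimal sketch of both behaviors, assuming it lives in the same package as FlinkStreamisConfigDefine; the address is a placeholder:

```java
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.GlobalConfiguration;

public class GatewayAddressSketch {
    public static void main(String[] args) {
        // Dynamic properties (-Dstream.log.gateway.address=...) override flink-conf.yaml
        System.setProperty("stream.log.gateway.address", "http://127.0.0.1:9001/");
        Configuration dynamic = new Configuration();
        System.getProperties().stringPropertyNames()
                .forEach(k -> dynamic.setString(k, System.getProperty(k)));
        Configuration conf = GlobalConfiguration.loadConfiguration(".", dynamic);

        // Same composition as logAppenderConfig(): trim the trailing slash,
        // then append the collect path (defaults to the Streamis collect entrypoint)
        String gateway = conf.getString(FlinkStreamisConfigDefine.LOG_GATEWAY_ADDRESS);
        if (gateway.endsWith("/")) {
            gateway = gateway.substring(0, gateway.length() - 1);
        }
        gateway += conf.getString(FlinkStreamisConfigDefine.LOG_COLLECT_PATH, "/");
        System.out.println(gateway);
    }
}
```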
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkStreamisConfigDefine.java b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkStreamisConfigDefine.java
new file mode 100644
index 000000000..b483b4eda
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkStreamisConfigDefine.java
@@ -0,0 +1,160 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.flink;
+
+import org.apache.flink.configuration.ConfigOption;
+import org.apache.flink.configuration.ConfigOptions;
+
+import java.util.List;
+
+/**
+ * Config definition
+ */
+public class FlinkStreamisConfigDefine {
+
+ private FlinkStreamisConfigDefine(){}
+
+
+ private static String error ="ERROR";
+
+ /**
+ * Gateway address of log module for streamis
+ */
+ public static final ConfigOption<String> LOG_GATEWAY_ADDRESS = ConfigOptions.key("stream.log.gateway.address")
+ .stringType().noDefaultValue().withDescription("The gateway address ex: http://127.0.0.1:8080");
+
+ /**
+ * Entrypoint path of collecting log
+ */
+ public static final ConfigOption<String> LOG_COLLECT_PATH = ConfigOptions.key("stream.log.collect.path")
+ .stringType().defaultValue("/api/rest_j/v1/streamis/streamJobManager/log/collect/events").withDescription("The entrypoint path of collecting log");
+
+ /**
+ * Connection timeout(in milliseconds) in log RPC module
+ */
+ public static final ConfigOption<Integer> LOG_RPC_CONN_TIMEOUT = ConfigOptions.key("stream.log.rpc.connect-timeout")
+ .intType().defaultValue(3000).withDescription("Connection timeout(ms) in log RPC module");
+
+ /**
+ * Socket timeout(in milliseconds) in log RPC module
+ */
+ public static final ConfigOption<Integer> LOG_RPC_SOCKET_TIMEOUT = ConfigOptions.key("stream.log.rpc.socket-timeout")
+ .intType().defaultValue(15000).withDescription("Socket timeout(ms) in log RPC module");
+
+ /**
+ * Max retry count of sending message in log RPC module
+ */
+ public static final ConfigOption<Integer> LOG_RPC_SEND_RETRY_COUNT = ConfigOptions.key("stream.log.rpc.send-retry-count")
+ .intType().defaultValue(3).withDescription("Max retry count of sending message in log RPC module");
+
+ /**
+ * Server recovery time(in seconds) in log RPC module
+ */
+ public static final ConfigOption<Integer> LOG_RPC_SERVER_RECOVERY_TIME = ConfigOptions.key("stream.log.rpc.server-recovery-time-in-sec")
+ .intType().defaultValue(5).withDescription("Server recovery time(sec) in log RPC module");
+
+ /**
+ * Max delay time(in seconds) in log RPC module. if reach the limit, the message will be dropped
+ */
+ public static final ConfigOption<Integer> LOG_RPC_MAX_DELAY_TIME = ConfigOptions.key("stream.log.rpc.max-delay-time")
+ .intType().defaultValue(60).withDescription("Max delay time(sec) in log RPC module");
+
+ /**
+ * Token code key in log RPC auth module
+ */
+ public static final ConfigOption<String> LOG_RPC_AUTH_TOKEN_CODE_KEY = ConfigOptions.key("stream.log.rpc.auth.token-code-key")
+ .stringType().defaultValue("Token-Code").withDescription("Token code key in log RPC auth module");
+
+ /**
+ * Token user key in log RPC auth module
+ */
+ public static final ConfigOption<String> LOG_RPC_AUTH_TOKEN_USER_KEY = ConfigOptions.key("stream.log.rpc.auth.token-user-key")
+ .stringType().defaultValue("Token-User").withDescription("Token user key in log RPC auth module");
+
+ /**
+ * Token code in log RPC auth module
+ */
+ public static final ConfigOption<String> LOG_RPC_AUTH_TOKEN_CODE = ConfigOptions.key("stream.log.rpc.auth.token-code")
+ .stringType().defaultValue("STREAM-LOG").withDescription("Token code in log RPC auth module");
+
+ /**
+ * Token user in log RPC auth module
+ */
+ public static final ConfigOption<String> LOG_RPC_AUTH_TOKEN_USER = ConfigOptions.key("stream.log.rpc.auth.token-user")
+ .stringType().defaultValue(System.getProperty("user.name")).withDescription("Token user in log RPC auth module");
+
+ /**
+ * Cache size in log RPC module
+ */
+ public static final ConfigOption<Integer> LOG_RPC_CACHE_SIZE = ConfigOptions.key("stream.log.rpc.cache.size")
+ .intType().defaultValue(150).withDescription("Cache size in log RPC module");
+
+ /**
+ * Max cache consume threads in log RPC module
+ */
+ public static final ConfigOption<Integer> LOG_PRC_CACHE_MAX_CONSUME_THREAD = ConfigOptions.key("stream.log.rpc.cache.max-consume-thread")
+ .intType().defaultValue(2).withDescription("Max cache consume threads in log RPC module");
+
+ /**
+ * Whether to discard the useless log
+ */
+ public static final ConfigOption<Boolean> LOG_RPC_CACHE_DISCARD = ConfigOptions.key("stream.log.rpc.cache.discard")
+ .booleanType().defaultValue(true).withDescription("If discard the useless log");
+
+ /**
+ * The window size of discarding
+ */
+ public static final ConfigOption<Integer> LOG_RPC_CACHE_DISCARD_WINDOW = ConfigOptions.key("stream.log.rpc.cache.discard-window")
+ .intType().defaultValue(2).withDescription("The window size of discarding");
+ /**
+ * Buffer size in log RPC module
+ */
+ public static final ConfigOption<Integer> LOG_RPC_BUFFER_SIZE = ConfigOptions.key("stream.log.rpc.buffer.size")
+ .intType().defaultValue(50).withDescription("Buffer size in log RPC module");
+
+ /**
+ * Buffer expire time(sec) in log RPC module
+ */
+ public static final ConfigOption<Integer> LOG_RPC_BUFFER_EXPIRE_TIME = ConfigOptions.key("stream.log.rpc.buffer.expire-time-in-sec")
+ .intType().defaultValue(2).withDescription("Buffer expire time (sec) in log RPC module");
+
+ /**
+ * Log filter strategy list
+ */
+ public static final ConfigOption<List<String>> LOG_FILTER_STRATEGIES = ConfigOptions.key("stream.log.filter.strategies")
+ .stringType().asList().defaultValues("Keyword").withDescription("Log filter strategy list");
+
+ /**
+ * Level value of LevelMatch filter strategy
+ */
+ public static final ConfigOption<String> LOG_FILTER_LEVEL_MATCH = ConfigOptions.key("stream.log.filter.level-match.level")
+ .stringType().defaultValue(error).withDescription("Level value of LevelMatch filter strategy");
+
+
+ /**
+ * Level value of ThresholdMatch filter strategy
+ */
+ public static final ConfigOption<String> LOG_FILTER_THRESHOLD_MATCH = ConfigOptions.key("stream.log.filter.threshold.level")
+ .stringType().defaultValue(error).withDescription("Level value of ThresholdMatch filter strategy");
+ /**
+ * Regex value of RegexMatch filter strategy
+ */
+ public static final ConfigOption<String> LOG_FILTER_REGEX = ConfigOptions.key("stream.log.filter.regex.value")
+ .stringType().defaultValue(".*").withDescription("Regex value of RegexMatch filter strategy");
+
+ /**
+ * Accept keywords of Keyword filter strategy
+ */
+ public static final ConfigOption<String> LOG_FILTER_KEYWORDS = ConfigOptions.key("stream.log.filter.keywords")
+ .stringType().defaultValue(error).withDescription("Accept keywords of Keyword filter strategy");
+
+ /**
+ * Exclude keywords of Keyword filter strategy
+ */
+ public static final ConfigOption<String> LOG_FILTER_KEYWORDS_EXCLUDE = ConfigOptions.key("stream.log.filter.keywords.exclude")
+ .stringType().defaultValue("").withDescription("Exclude keywords of Keyword filter strategy");
+
+ /**
+ * Debug mode
+ */
+ public static final ConfigOption<Boolean> DEBUG_MODE = ConfigOptions.key("stream.log.debug")
+ .booleanType().defaultValue(false).withDescription("Debug mode");
+}
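Unset options resolve to their declared defaults, and Flink parses list-typed options such as stream.log.filter.strategies from a semicolon-separated string. A small sketch (the option values are illustrative, same-package access to the constants is assumed):

```java
import org.apache.flink.configuration.Configuration;

import java.util.List;

public class ConfigDefaultsSketch {
    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Unset options fall back to their declared defaults
        List<String> strategies = conf.get(FlinkStreamisConfigDefine.LOG_FILTER_STRATEGIES); // [Keyword]
        boolean debug = conf.getBoolean(FlinkStreamisConfigDefine.DEBUG_MODE);               // false
        // List-typed options are parsed from a semicolon-separated string
        conf.setString("stream.log.filter.strategies", "LevelMatch;RegexMatch");
        System.out.println(conf.get(FlinkStreamisConfigDefine.LOG_FILTER_STRATEGIES));
        System.out.println(strategies + " " + debug);
    }
}
```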
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired
new file mode 100644
index 000000000..dc13253b7
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/main/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired
@@ -0,0 +1 @@
+com.webank.wedatasphere.streamis.jobmanager.log.collector.flink.FlinkStreamisConfigAutowired
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkConfigurationLoadTest.java b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkConfigurationLoadTest.java
new file mode 100644
index 000000000..bd9cef938
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/flink/FlinkConfigurationLoadTest.java
@@ -0,0 +1,26 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.flink;
+
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.configuration.GlobalConfiguration;
+import org.junit.Test;
+
+import java.util.Enumeration;
+import java.util.Objects;
+import java.util.Properties;
+
+public class FlinkConfigurationLoadTest {
+
+ @Test
+ public void loadConfiguration() {
+ String configDir = Objects.requireNonNull(FlinkConfigurationLoadTest.class.getResource("/")).getFile();
+ Properties properties = System.getProperties();
+ Enumeration<?> enumeration = properties.propertyNames();
+ Configuration dynamicConfiguration = new Configuration();
+ while(enumeration.hasMoreElements()){
+ String prop = String.valueOf(enumeration.nextElement());
+ dynamicConfiguration.setString(prop, properties.getProperty(prop));
+ }
+ GlobalConfiguration.loadConfiguration(configDir, dynamicConfiguration);
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired
new file mode 100644
index 000000000..dc13253b7
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired
@@ -0,0 +1 @@
+com.webank.wedatasphere.streamis.jobmanager.log.collector.flink.FlinkStreamisConfigAutowired
\ No newline at end of file
diff --git a/docs/zh_CN/0.2.0/architecture/README.md b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/flink-conf.yaml
similarity index 100%
rename from docs/zh_CN/0.2.0/architecture/README.md
rename to streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/flink-conf.yaml
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/log4j2.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/log4j2.xml
new file mode 100644
index 000000000..ee3f4125a
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/flink-streamis-log-collector/src/test/resources/log4j2.xml
@@ -0,0 +1,37 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/pom.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/pom.xml
new file mode 100644
index 000000000..1be8f531a
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/pom.xml
@@ -0,0 +1,40 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>streamis-job-log</artifactId>
+        <groupId>com.webank.wedatasphere.streamis</groupId>
+        <version>0.3.0</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>streamis-job-log-collector-core</artifactId>
+
+    <properties>
+        <maven.compiler.source>8</maven.compiler.source>
+        <maven.compiler.target>8</maven.compiler.target>
+        <httpclient.version>4.5.13</httpclient.version>
+        <httpmine.version>4.5.4</httpmine.version>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.webank.wedatasphere.streamis</groupId>
+            <artifactId>streamis-job-log-common</artifactId>
+            <version>${streamis.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpclient</artifactId>
+            <version>${httpclient.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.httpcomponents</groupId>
+            <artifactId>httpmime</artifactId>
+            <version>${httpmine.version}</version>
+        </dependency>
+    </dependencies>
+</project>
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/ExceptionListener.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/ExceptionListener.java
new file mode 100644
index 000000000..4c9ac6ea8
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/ExceptionListener.java
@@ -0,0 +1,15 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector;
+
+/**
+ * Exception listener
+ */
+public interface ExceptionListener {
+
+ /**
+ * Listen for the exception
+ * @param subject the subject that throws the exception
+ * @param t Throwable
+ * @param message message
+ */
+ void onException(Object subject, Throwable t, String message);
+}
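Since this is a single-method interface, a collector can install it as a lambda; a sketch that routes sender failures to stderr rather than back into the logging framework (which could recurse through the appender itself):

```java
public class StderrExceptionListenerSketch {
    static final ExceptionListener LISTENER = (subject, t, message) -> {
        // Never log through log4j here: the failure may originate from the appender
        System.err.println("[streamis-log-collector] " + (message != null ? message : String.valueOf(t)));
    };
}
```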
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/cache/LogCache.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/cache/LogCache.java
new file mode 100644
index 000000000..f11556cc8
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/cache/LogCache.java
@@ -0,0 +1,43 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.cache;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement;
+
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Log cache
+ * @param <E> log element type
+ */
+public interface LogCache<E extends LogElement> {
+
+ /**
+ * Cache log
+ * @param logElement log element
+ */
+ void cacheLog(E logElement) throws InterruptedException;
+
+ /**
+ * Drain log elements into collection
+ * @param elements elements
+ * @param maxElements max elements size
+ * @return count
+ */
+ int drainLogsTo(List<E> elements, int maxElements);
+
+ /**
+ * Take log element
+ * @return log element
+ */
+ E takeLog(long timeout, TimeUnit unit) throws InterruptedException;
+
+ /**
+ * Whether the cache can still accept elements
+ * @return true if the cache is not full
+ */
+ boolean isCacheable();
+ /**
+ * Release the resource
+ */
+ void destroy();
+}
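A typical consumer pairs takeLog with drainLogsTo: block briefly for the first element, then batch whatever else is already cached without blocking again. A sketch assuming a LogCache<LogElement> named cache and a batch limit of 50:

```java
import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;

public class CacheConsumerSketch {
    // Wait up to 2s for the first element, then drain the rest of the batch
    static List<LogElement> nextBatch(LogCache<LogElement> cache) throws InterruptedException {
        List<LogElement> batch = new ArrayList<>();
        LogElement first = cache.takeLog(2, TimeUnit.SECONDS);
        if (first != null) {
            batch.add(first);
            cache.drainLogsTo(batch, 49);
        }
        return batch;
    }
}
```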
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/RpcAuthConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/RpcAuthConfig.java
new file mode 100644
index 000000000..ebf9b7f68
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/RpcAuthConfig.java
@@ -0,0 +1,86 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.config;
+
+
+/**
+ * Authentication config
+ */
+public class RpcAuthConfig {
+ /**
+ * Key of token-code
+ */
+ private String tokenCodeKey = "Token-Code";
+
+ /**
+ * Key of token-user
+ */
+ private String tokenUserKey = "Token-User";
+
+ /**
+ * Token user
+ */
+ private String tokenUser = System.getProperty("user.name");
+
+ /**
+ * Token code
+ */
+ private String tokenCode = "STREAM-LOG";
+
+ public RpcAuthConfig(){
+
+ }
+
+ public RpcAuthConfig(String tokenCodeKey, String tokenCode, String tokenUserKey, String tokenUser){
+ if (null != tokenCodeKey) {
+ this.tokenCodeKey = tokenCodeKey;
+ }
+ if (null != tokenCode){
+ this.tokenCode = tokenCode;
+ }
+ if (null != tokenUserKey){
+ this.tokenUserKey = tokenUserKey;
+ }
+ if (null != tokenUser){
+ this.tokenUser = tokenUser;
+ }
+ }
+
+ public String getTokenCodeKey() {
+ return tokenCodeKey;
+ }
+
+ public void setTokenCodeKey(String tokenCodeKey) {
+ this.tokenCodeKey = tokenCodeKey;
+ }
+
+ public String getTokenUserKey() {
+ return tokenUserKey;
+ }
+
+ public void setTokenUserKey(String tokenUserKey) {
+ this.tokenUserKey = tokenUserKey;
+ }
+
+ public String getTokenUser() {
+ return tokenUser;
+ }
+
+ public void setTokenUser(String tokenUser) {
+ this.tokenUser = tokenUser;
+ }
+
+ public String getTokenCode() {
+ return tokenCode;
+ }
+
+ public void setTokenCode(String tokenCode) {
+ this.tokenCode = tokenCode;
+ }
+
+ @Override
+ public String toString() {
+ return "RpcAuthConfig{" +
+ ", tokenUserKey='" + tokenUserKey + '\'' +
+ ", tokenUser='" + tokenUser + '\'' +
+ '}';
+ }
+}
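The Token-Code/Token-User defaults mirror Linkis gateway token authentication: the key fields name HTTP headers and the value fields carry the credentials. A sketch of how a sender might attach them (the URL is a placeholder; the real sender code lives elsewhere in this patch):

```java
import org.apache.http.client.methods.HttpPost;

public class AuthHeaderSketch {
    public static void main(String[] args) {
        RpcAuthConfig auth = new RpcAuthConfig();
        // Header names come from the *Key fields, credentials from the value fields
        HttpPost post = new HttpPost("http://127.0.0.1:9001/api/rest_j/v1/streamis/streamJobManager/log/collect/events");
        post.setHeader(auth.getTokenCodeKey(), auth.getTokenCode());  // Token-Code: STREAM-LOG
        post.setHeader(auth.getTokenUserKey(), auth.getTokenUser());  // Token-User: <process user>
    }
}
```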
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/RpcLogSenderConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/RpcLogSenderConfig.java
new file mode 100644
index 000000000..0fb03185f
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/RpcLogSenderConfig.java
@@ -0,0 +1,180 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.config;
+
+
+import java.util.Objects;
+
+/**
+ * Rpc sender configuration
+ */
+public class RpcLogSenderConfig {
+
+ /**
+ * Send address
+ */
+ private String address;
+
+ /**
+ * Timeout of connecting
+ */
+ private int connectionTimeout = 3000;
+
+ /**
+ * Timeout of reading from socket
+ */
+ private int socketTimeout = 15000;
+
+ /**
+ * Retry count of sending
+ */
+ private int sendRetryCnt = 3;
+
+ /**
+ * The time for server recovery
+ */
+ private int serverRecoveryTimeInSec = 5;
+
+ /**
+ * Retry max delay time of sender
+ */
+ private int maxDelayTimeInSec = 60;
+
+ /**
+ * Whether debug mode is enabled
+ */
+ private boolean debugMode = false;
+ /**
+ * Auth config
+ */
+ private RpcAuthConfig authConfig = new RpcAuthConfig();
+
+ /**
+ * Cache config
+ */
+ private SendLogCacheConfig cacheConfig = new SendLogCacheConfig();
+
+ /**
+ * Buffer config
+ */
+ private SendBufferConfig bufferConfig = new SendBufferConfig();
+
+ public RpcLogSenderConfig(){
+
+ }
+
+ public RpcLogSenderConfig(String address, int sendRetryCnt, int connectionTimeout, int socketTimeout,
+ int serverRecoveryTimeInSec, int maxDelayTimeInSec,
+ RpcAuthConfig authConfig, SendLogCacheConfig cacheConfig, SendBufferConfig bufferConfig){
+ this.address = address;
+ this.sendRetryCnt = sendRetryCnt;
+ this.connectionTimeout = connectionTimeout;
+ this.socketTimeout = socketTimeout;
+ this.serverRecoveryTimeInSec = serverRecoveryTimeInSec;
+ this.maxDelayTimeInSec = maxDelayTimeInSec;
+ if (Objects.nonNull(authConfig)){
+ this.authConfig = authConfig;
+ }
+ if (Objects.nonNull(cacheConfig)){
+ this.cacheConfig = cacheConfig;
+ }
+ if (Objects.nonNull(bufferConfig)){
+ this.bufferConfig = bufferConfig;
+ }
+ }
+
+ public RpcAuthConfig getAuthConfig() {
+ return authConfig;
+ }
+
+ public void setAuthConfig(RpcAuthConfig authConfig) {
+ this.authConfig = authConfig;
+ }
+
+ public SendLogCacheConfig getCacheConfig() {
+ return cacheConfig;
+ }
+
+ public void setCacheConfig(SendLogCacheConfig cacheConfig) {
+ this.cacheConfig = cacheConfig;
+ }
+
+ public SendBufferConfig getBufferConfig() {
+ return bufferConfig;
+ }
+
+ public void setBufferConfig(SendBufferConfig bufferConfig) {
+ this.bufferConfig = bufferConfig;
+ }
+
+ public String getAddress() {
+ return address;
+ }
+
+ public void setAddress(String address) {
+ this.address = address;
+ }
+
+ public int getSendRetryCnt() {
+ return sendRetryCnt;
+ }
+
+ public void setSendRetryCnt(int sendRetryCnt) {
+ this.sendRetryCnt = sendRetryCnt;
+ }
+
+ public int getConnectionTimeout() {
+ return connectionTimeout;
+ }
+
+ public void setConnectionTimeout(int connectionTimeout) {
+ this.connectionTimeout = connectionTimeout;
+ }
+
+ public int getSocketTimeout() {
+ return socketTimeout;
+ }
+
+ public void setSocketTimeout(int socketTimeout) {
+ this.socketTimeout = socketTimeout;
+ }
+
+ public int getMaxDelayTimeInSec() {
+ return maxDelayTimeInSec;
+ }
+
+ public void setMaxDelayTimeInSec(int maxDelayTimeInSec) {
+ this.maxDelayTimeInSec = maxDelayTimeInSec;
+ }
+
+ public int getServerRecoveryTimeInSec() {
+ return serverRecoveryTimeInSec;
+ }
+
+ public void setServerRecoveryTimeInSec(int serverRecoveryTimeInSec) {
+ this.serverRecoveryTimeInSec = serverRecoveryTimeInSec;
+ }
+
+ public boolean isDebugMode() {
+ return debugMode;
+ }
+
+ public void setDebugMode(boolean debugMode) {
+ this.debugMode = debugMode;
+ }
+
+ @Override
+ public String toString() {
+ return "RpcLogSenderConfig{" +
+ "address='" + address + '\'' +
+ ", connectionTimeout=" + connectionTimeout +
+ ", socketTimeout=" + socketTimeout +
+ ", sendRetryCnt=" + sendRetryCnt +
+ ", serverRecoveryTimeInSec=" + serverRecoveryTimeInSec +
+ ", maxDelayTimeInSec=" + maxDelayTimeInSec +
+ ", authConfig=" + authConfig +
+ ", cacheConfig=" + cacheConfig +
+ ", bufferConfig=" + bufferConfig +
+ ", debug=" + debugMode +
+ '}';
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/SendBufferConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/SendBufferConfig.java
new file mode 100644
index 000000000..6be0ae826
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/SendBufferConfig.java
@@ -0,0 +1,47 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.config;
+
+
+public class SendBufferConfig {
+ /**
+ * Size of send buffer
+ */
+ private int size = 50;
+
+ /**
+ * Expire time of send buffer
+ */
+ private long expireTimeInSec = 2;
+
+ public SendBufferConfig(){
+
+ }
+
+ public SendBufferConfig(int size, long expireTimeInSec){
+ this.size = size;
+ this.expireTimeInSec = expireTimeInSec;
+ }
+
+ public int getSize() {
+ return size;
+ }
+
+ public void setSize(int size) {
+ this.size = size;
+ }
+
+ public long getExpireTimeInSec() {
+ return expireTimeInSec;
+ }
+
+ public void setExpireTimeInSec(long expireTimeInSec) {
+ this.expireTimeInSec = expireTimeInSec;
+ }
+
+ @Override
+ public String toString() {
+ return "SendBufferConfig{" +
+ "size=" + size +
+ ", expireTimeInSec=" + expireTimeInSec +
+ '}';
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/SendLogCacheConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/SendLogCacheConfig.java
new file mode 100644
index 000000000..e40a630c7
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/SendLogCacheConfig.java
@@ -0,0 +1,81 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.config;
+
+/**
+ * Cache config
+ */
+
+public class SendLogCacheConfig {
+ /**
+ * Size of send cache
+ */
+ private int size = 150;
+
+ /**
+ * Max number of consuming thread
+ */
+ private int maxConsumeThread = 2;
+
+ /**
+ * The switch to discard log
+ */
+ private boolean discard = true;
+
+ /**
+ * Discard window in second
+ */
+ private int discardWindow = 2;
+
+ public SendLogCacheConfig(){
+
+ }
+
+ public SendLogCacheConfig(int size, int maxConsumeThread){
+ this.size = size;
+ this.maxConsumeThread = maxConsumeThread;
+ }
+
+ public int getSize() {
+ return size;
+ }
+
+ public void setSize(int size) {
+ this.size = size;
+ }
+
+ public int getMaxConsumeThread() {
+ return maxConsumeThread;
+ }
+
+ public void setMaxConsumeThread(int maxConsumeThread) {
+ this.maxConsumeThread = maxConsumeThread;
+ }
+
+ public boolean isDiscard() {
+ return discard;
+ }
+
+ public void setDiscard(boolean discard) {
+ this.discard = discard;
+ }
+
+ public int getDiscardWindow() {
+ return discardWindow;
+ }
+
+ public void setDiscardWindow(int discardWindow) {
+ this.discardWindow = discardWindow;
+ }
+
+ @Override
+ public String toString() {
+ return "SendLogCacheConfig{" +
+ "size=" + size +
+ ", maxConsumeThread=" + maxConsumeThread +
+ ", discard=" + discard +
+ ", discardWindow=" + discardWindow +
+ '}';
+ }
+
+
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/StreamisLogAppenderConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/StreamisLogAppenderConfig.java
new file mode 100644
index 000000000..f874c8d8b
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/config/StreamisLogAppenderConfig.java
@@ -0,0 +1,260 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.config;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilter;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Optional;
+
+/**
+ * Appender config
+ */
+public class StreamisLogAppenderConfig {
+
+ protected final String applicationName;
+
+
+ protected final RpcLogSenderConfig senderConfig;
+
+ /**
+ * Message filters
+ */
+ protected final List<LogMessageFilter> messageFilters;
+ protected StreamisLogAppenderConfig(String applicationName, RpcLogSenderConfig rpcLogSenderConfig,
+ List<LogMessageFilter> messageFilters){
+ this.applicationName = applicationName;
+ this.senderConfig = null != rpcLogSenderConfig? rpcLogSenderConfig : new RpcLogSenderConfig();
+ this.messageFilters = messageFilters;
+ }
+
+ public static class Builder{
+ /**
+ * Application name
+ */
+ protected String applicationName;
+
+ /**
+ * Sender config
+ */
+ protected final RpcLogSenderConfig rpcLogSenderConfig;
+
+ /**
+ * Message filters
+ */
+ protected final List<LogMessageFilter> messageFilters = new ArrayList<>();
+
+ public Builder(String applicationName,
+ RpcLogSenderConfig rpcLogSenderConfig){
+ this.applicationName = applicationName;
+
+ this.rpcLogSenderConfig = Optional.ofNullable(rpcLogSenderConfig).orElse(new RpcLogSenderConfig());
+ }
+
+ /**
+ * Set application name
+ * @param applicationName application name
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setAppName(String applicationName){
+ this.applicationName = applicationName;
+ return this;
+ }
+
+
+
+ /**
+ * Rpc address
+ * @param address address
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcAddress(String address){
+ this.rpcLogSenderConfig.setAddress(address);
+ return this;
+ }
+
+ /**
+ * Rpc connect timeout
+ * @param connectionTimeout connection timeout
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcConnTimeout(int connectionTimeout){
+ this.rpcLogSenderConfig.setConnectionTimeout(connectionTimeout);
+ return this;
+ }
+
+ /**
+ * Rpc socket timeout
+ * @param socketTimeout socket timeout
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcSocketTimeout(int socketTimeout){
+ this.rpcLogSenderConfig.setSocketTimeout(socketTimeout);
+ return this;
+ }
+
+ /**
+ * Rpc send retry count
+ * @param sendRetryCnt send retry count
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcSendRetryCnt(int sendRetryCnt){
+ this.rpcLogSenderConfig.setSendRetryCnt(sendRetryCnt);
+ return this;
+ }
+
+ /**
+ * Rpc server recovery time in seconds
+ * @param serverRecoveryTimeInSec server recovery time
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcServerRecoveryTimeInSec(int serverRecoveryTimeInSec){
+ this.rpcLogSenderConfig.setServerRecoveryTimeInSec(serverRecoveryTimeInSec);
+ return this;
+ }
+
+ /**
+ * Rpc max delay time in seconds
+ * @param maxDelayTimeInSec max delay time in seconds
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcMaxDelayTimeInSec(int maxDelayTimeInSec){
+ this.rpcLogSenderConfig.setMaxDelayTimeInSec(maxDelayTimeInSec);
+ return this;
+ }
+
+ /**
+ * Rpc auth token code key
+ * @param tokenCodeKey key of token code
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcAuthTokenCodeKey(String tokenCodeKey){
+ this.rpcLogSenderConfig.getAuthConfig().setTokenCodeKey(tokenCodeKey);
+ return this;
+ }
+
+ /**
+ * Rpc auth token user key
+ * @param tokenUserKey key of token user
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcAuthTokenUserKey(String tokenUserKey){
+ this.rpcLogSenderConfig.getAuthConfig().setTokenUserKey(tokenUserKey);
+ return this;
+ }
+
+ /**
+ * Rpc auth token user
+ * @param tokenUser token user
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcAuthTokenUser(String tokenUser){
+ this.rpcLogSenderConfig.getAuthConfig().setTokenUser(tokenUser);
+ return this;
+ }
+
+ /**
+ * Rpc auth token code
+ * @param tokenCode token code
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcAuthTokenCode(String tokenCode){
+ this.rpcLogSenderConfig.getAuthConfig().setTokenCode(tokenCode);
+ return this;
+ }
+
+ /**
+ * Rpc cache size
+ * @param cacheSize cache size
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcCacheSize(int cacheSize){
+ this.rpcLogSenderConfig.getCacheConfig().setSize(cacheSize);
+ return this;
+ }
+
+ /**
+ * Rpc cache max consume thread
+ * @param maxConsumeThread max consume thread
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcCacheMaxConsumeThread(int maxConsumeThread){
+ this.rpcLogSenderConfig.getCacheConfig().setMaxConsumeThread(maxConsumeThread);
+ return this;
+ }
+
+ /**
+ * Rpc buffer size
+ * @param bufferSize buffer size
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcBufferSize(int bufferSize){
+ this.rpcLogSenderConfig.getBufferConfig().setSize(bufferSize);
+ return this;
+ }
+
+ /**
+ * Rpc buffer expire time in seconds
+ * @param expireTimeInSec expire time
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setRpcBufferExpireTimeInSec(int expireTimeInSec){
+ this.rpcLogSenderConfig.getBufferConfig().setExpireTimeInSec(expireTimeInSec);
+ return this;
+ }
+
+ /**
+ * Add log message filter
+ * @param messageFilter message filter
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder withMessageFilter(LogMessageFilter messageFilter){
+ this.messageFilters.add(messageFilter);
+ return this;
+ }
+
+ /**
+ * Set to discard the useless log
+ * @param discard discard
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setDiscard(boolean discard){
+ this.rpcLogSenderConfig.getCacheConfig().setDiscard(discard);
+ return this;
+ }
+
+ /**
+ * Set the window size of discarding
+ * @param windowSize window size in seconds
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setDiscardWindow(int windowSize){
+ this.rpcLogSenderConfig.getCacheConfig().setDiscardWindow(windowSize);
+ return this;
+ }
+ /**
+ * Switch to debug
+ * @param debugMode debug mode
+ * @return builder
+ */
+ public StreamisLogAppenderConfig.Builder setDebugMode(boolean debugMode){
+ this.rpcLogSenderConfig.setDebugMode(debugMode);
+ return this;
+ }
+
+ public StreamisLogAppenderConfig build(){
+ return new StreamisLogAppenderConfig(applicationName, rpcLogSenderConfig, messageFilters);
+ }
+ }
+ public String getApplicationName() {
+ return applicationName;
+ }
+
+
+ public RpcLogSenderConfig getSenderConfig() {
+ return senderConfig;
+ }
+
+ public List<LogMessageFilter> getMessageFilters() {
+ return messageFilters;
+ }
+}
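A sketch of driving this builder the way an autowired implementation (such as the Flink one above) does; all values shown are illustrative and same-package access is assumed:

```java
public class AppenderConfigSketch {
    public static void main(String[] args) {
        StreamisLogAppenderConfig config = new StreamisLogAppenderConfig.Builder(
                "my-flink-app", new RpcLogSenderConfig())
                .setRpcAddress("http://127.0.0.1:9001/api/rest_j/v1/streamis/streamJobManager/log/collect/events")
                .setRpcCacheSize(150)
                .setRpcCacheMaxConsumeThread(2)
                .setRpcBufferSize(50)
                .setDiscard(true)
                .build();
        System.out.println(config.getSenderConfig());
    }
}
```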
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/message/filters/KeywordMessageFilter.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/message/filters/KeywordMessageFilter.java
new file mode 100644
index 000000000..5d12ea071
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/message/filters/KeywordMessageFilter.java
@@ -0,0 +1,126 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters;
+
+import java.lang.reflect.Field;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.regex.Pattern;
+
+/**
+ * Message filter of keyword
+ */
+public class KeywordMessageFilter implements LogMessageFilter{
+
+ /**
+ * Accept keywords
+ */
+ private final String[] acceptKeywords;
+
+ /**
+ * Regex pattern of accept keywords
+ */
+ private Pattern acceptPattern;
+ /**
+ * Exclude keywords
+ */
+ private final String[] excludeKeywords;
+
+ /**
+ * Regex pattern of exclude keywords
+ */
+ private Pattern excludePattern;
+
+ /**
+ * Flags for pattern
+ */
+ private int patternFlag = 0;
+
+ public KeywordMessageFilter(String[] acceptKeywords, String[] excludeKeywords){
+ this(acceptKeywords, excludeKeywords, null);
+ }
+
+ public KeywordMessageFilter(String[] acceptKeywords, String[] excludeKeywords, String[] patternFlags){
+ this.acceptKeywords = acceptKeywords;
+ this.excludeKeywords = excludeKeywords;
+ try {
+ this.patternFlag = toPatternFlags(patternFlags);
+ } catch (IllegalAccessException e) {
+ // Ignore
+ }
+ // Build regex pattern
+ if (acceptKeywords != null && acceptKeywords.length > 0){
+ this.acceptPattern = toMatchPattern(acceptKeywords, this.patternFlag);
+ }
+ if (excludeKeywords != null && excludeKeywords.length > 0){
+ this.excludePattern = toMatchPattern(excludeKeywords, this.patternFlag);
+ }
+ }
+
+ @Override
+ public boolean doFilter(String logger, String message) {
+ boolean accept = true;
+ if (null != acceptPattern){
+ accept = acceptPattern.matcher(message).find();
+ }
+ if (accept && null != excludePattern){
+ accept = !excludePattern.matcher(message).find();
+ }
+ return accept;
+ }
+
+ /**
+ * Convert to pattern
+ * @param keywords keyword array
+ * @param flag pattern flag
+ * @return Regex pattern
+ */
+ protected Pattern toMatchPattern(String[] keywords, int flag){
+ StringBuilder patternStr = new StringBuilder("(");
+ for(int i = 0; i < keywords.length; i++){
+ patternStr.append(keywords[i]);
+ if (i != keywords.length - 1){
+ patternStr.append("|");
+ }
+ }
+ patternStr.append(")");
+ return Pattern.compile(patternStr.toString(), flag);
+ }
+
+ /**
+ * Convert the pattern flag array to int
+ * @param patternFlags flag string array
+ * @return int value
+ * @throws IllegalArgumentException
+ * @throws IllegalAccessException
+ */
+ private static int toPatternFlags(final String[] patternFlags) throws IllegalArgumentException,
+ IllegalAccessException {
+ if (patternFlags == null || patternFlags.length == 0) {
+ return 0;
+ }
+ final Field[] fields = Pattern.class.getDeclaredFields();
+ final Comparator<Field> comparator = Comparator.comparing(Field::getName);
+ Arrays.sort(fields, comparator);
+ final String[] fieldNames = new String[fields.length];
+ for (int i = 0; i < fields.length; i++) {
+ fieldNames[i] = fields[i].getName();
+ }
+ int flags = 0;
+ for (final String test : patternFlags) {
+ final int index = Arrays.binarySearch(fieldNames, test);
+ if (index >= 0) {
+ final Field field = fields[index];
+ flags |= field.getInt(Pattern.class);
+ }
+ }
+ return flags;
+ }
+
+ public final String[] getAcceptKeywords(){
+ return this.acceptKeywords;
+ }
+
+ public final String[] getExcludeKeywords(){
+ return this.excludeKeywords;
+ }
+
+}
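doFilter accepts a message when any accept keyword matches and no exclude keyword does; the keywords are compiled into a single (k1|k2|...) alternation and matched with find(), i.e. substring semantics. For example:

```java
public class KeywordFilterSketch {
    public static void main(String[] args) {
        KeywordMessageFilter filter = new KeywordMessageFilter(
                new String[]{"ERROR", "WARN"}, new String[]{"HeartbeatTimeout"});
        System.out.println(filter.doFilter("demo", "ERROR job failed"));       // true
        System.out.println(filter.doFilter("demo", "INFO checkpoint done"));   // false: no accept keyword
        System.out.println(filter.doFilter("demo", "ERROR HeartbeatTimeout")); // false: excluded
    }
}
```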
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/message/filters/LogMessageFilter.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/message/filters/LogMessageFilter.java
new file mode 100644
index 000000000..bc778bea5
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/message/filters/LogMessageFilter.java
@@ -0,0 +1,15 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters;
+
+/**
+ * Log message filter, filter the message content (layout formatted)
+ */
+public interface LogMessageFilter {
+ /**
+ * Filter formatted message
+ * @param logger logger name
+ * @param message message content
+ * @return if match the filter
+ */
+ boolean doFilter(String logger, String message);
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/message/filters/LogMessageFilterAdapter.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/message/filters/LogMessageFilterAdapter.java
new file mode 100644
index 000000000..f98427afa
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/message/filters/LogMessageFilterAdapter.java
@@ -0,0 +1,13 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters;
+
+/**
+ * Interface for the filter adapter
+ */
+public interface LogMessageFilterAdapter {
+
+ /**
+ * Message filter
+ * @return filter
+ */
+ LogMessageFilter getLogMessageFilter();
+}
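An implementor is expected to be both a framework-level filter and an adapter exposing the content filter to the appender; a hypothetical sketch (KeywordThresholdFilter in the Flink module presumably follows this pattern, possibly also implementing a log4j2 Filter):

```java
public class KeywordFilterAdapterSketch implements LogMessageFilterAdapter {
    private final LogMessageFilter filter =
            new KeywordMessageFilter(new String[]{"ERROR"}, null);

    @Override
    public LogMessageFilter getLogMessageFilter() {
        return filter;
    }
}
```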
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/AbstractRpcLogSender.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/AbstractRpcLogSender.java
new file mode 100644
index 000000000..deced7918
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/AbstractRpcLogSender.java
@@ -0,0 +1,568 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.ExceptionListener;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.cache.LogCache;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.SendLogCacheConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.ImmutableSendBuffer;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.SendBuffer;
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement;
+
+import java.io.IOException;
+import java.util.*;
+import java.util.concurrent.*;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.locks.Condition;
+import java.util.concurrent.locks.ReentrantLock;
+
+/**
+ * Abstract rpc log sender
+ * @param <T> log element type
+ * @param <E> aggregated entity type
+ */
+public abstract class AbstractRpcLogSender<T extends LogElement, E> implements RpcLogSender<T>{
+
+ /**
+ * Size of log cache
+ */
+ int cacheSize;
+
+ /**
+ * The buffer size of sender
+ */
+ int sendBufSize;
+
+ /**
+ * Max thread num of send
+ */
+ int maxCacheConsume;
+
+ /**
+ * Connect config
+ */
+ protected RpcLogSenderConfig rpcSenderConfig;
+
+ /**
+ * Rpc log context
+ */
+ private RpcLogContext rpcLogContext;
+
+ protected boolean isTerminated = false;
+ /**
+ * Use the listener instead of log4j structure
+ */
+ protected ExceptionListener exceptionListener;
+
+
+ protected AbstractRpcLogSender(RpcLogSenderConfig rpcSenderConfig){
+ this.rpcSenderConfig = rpcSenderConfig;
+ SendLogCacheConfig cacheConfig = rpcSenderConfig.getCacheConfig();
+ this.cacheSize = cacheConfig.getSize();
+ this.maxCacheConsume = cacheConfig.getMaxConsumeThread();
+ this.sendBufSize = rpcSenderConfig.getBufferConfig().getSize();
+ if (sendBufSize > cacheSize) {
+ throw new IllegalArgumentException("Size of send buffer is larger than cache size");
+ }
+
+ }
+
+ @Override
+ public LogCache<T> getOrCreateLogCache() {
+ return getOrCreateRpcLogContext().getLogCache();
+ }
+
+ @Override
+ public void sendLog(T log) {
+ // Just send it into log cache
+ try {
+ getOrCreateLogCache().cacheLog(log);
+ } catch (InterruptedException e) {
+ // Invoke exception listener
+ Optional.ofNullable(exceptionListener).ifPresent(listener ->
+ listener.onException(this, e, null));
+ }
+ }
+
+ @Override
+ public void syncSendLog(T log) {
+
+ }
+
+ @Override
+ public void setExceptionListener(ExceptionListener listener) {
+ this.exceptionListener = listener;
+ }
+
+ @Override
+ public void close() {
+ getOrCreateRpcLogContext().destroyCacheConsumers();
+ this.isTerminated = true;
+ }
+
+ /**
+ * Aggregate send buffer for sending
+ * @param sendBuffer send buffer
+ * @return E aggregated entity
+ */
+ protected abstract E aggregateBuffer(SendBuffer<T> sendBuffer);
+
+ /**
+ * Sending operation
+ * @param aggregatedEntity agg entity
+ * @param rpcSenderConfig rpc sender config
+ */
+ protected abstract void doSend(E aggregatedEntity, RpcLogSenderConfig rpcSenderConfig) throws IOException;
+
+ /**
+ * Send log exception strategy
+ * @return exception strategy
+ */
+ protected abstract SendLogExceptionStrategy<T> getSendLogExceptionStrategy();
+
+ protected synchronized RpcLogContext getOrCreateRpcLogContext() {
+ if (null == this.rpcLogContext) {
+ // Use fair lock
+ SendLogCache<T> logCache = new QueuedSendLogCache(this.cacheSize,
+ this.rpcSenderConfig.getCacheConfig().isDiscard(),
+ this.rpcSenderConfig.getCacheConfig().getDiscardWindow() * 1000, false);
+ RpcLogContext context = new RpcLogContext(logCache);
+ // Start cache consumers
+ for (int i = 0; i < maxCacheConsume; i++) {
+ context.startCacheConsumer();
+ }
+ this.rpcLogContext = context;
+ }
+ return this.rpcLogContext;
+ }
+
+ private class RpcLogContext{
+
+ private static final String RPC_LOG_CACHE_CONSUMER = "RpcLog-Cache-Consumer-Thread-";
+ /**
+ * Send log cache
+ */
+ private final SendLogCache<T> logCache;
+
+ /**
+ * Consume pool
+ */
+ private final ThreadPoolExecutor consumePool;
+
+ /**
+ * Count of the consumers
+ */
+ private int consumers = 0;
+
+ /**
+ * Futures of consumers
+ */
+ private final LinkedList<SendLogCacheConsumer<T>> sendLogCacheConsumers = new LinkedList<>();
+ /**
+ * Context lock
+ */
+ private final ReentrantLock ctxLock;
+ public RpcLogContext(SendLogCache<T> logCache){
+ this.logCache = logCache;
+ this.ctxLock = new ReentrantLock();
+ this.consumePool = new ThreadPoolExecutor(0, maxCacheConsume,
+ 60L, TimeUnit.SECONDS,
+ new SynchronousQueue<>(), new ThreadFactory() {
+ private final ThreadGroup group = Thread.currentThread().getThreadGroup();
+ private final AtomicInteger threadNum = new AtomicInteger(1);
+ @Override
+ public Thread newThread(Runnable r) {
+ Thread t = new Thread(group, r, RPC_LOG_CACHE_CONSUMER
+ + threadNum.getAndIncrement(), 0);
+ if (t.isDaemon()) {
+ t.setDaemon(false);
+ }
+ if (t.getPriority() != Thread.NORM_PRIORITY) {
+ t.setPriority(Thread.NORM_PRIORITY);
+ }
+ return t;
+ }
+ });
+ }
+
+ public boolean startCacheConsumer(){
+ if (consumers >= maxCacheConsume) {
+
+ return false;
+ }
+ this.ctxLock.lock();
+ try {
+ if (consumers < maxCacheConsume) {
+ String id = UUID.randomUUID().toString();
+ SendBuffer<T> sendBuffer = new ImmutableSendBuffer<>(sendBufSize);
+ SendLogCacheConsumer<T> consumer = new SendLogCacheConsumer<T>(id, logCache, sendBuffer, rpcSenderConfig) {
+ @Override
+ protected void onFlushAndSend(SendBuffer<T> sendBuffer) {
+ // First to aggregate the buffer
+ E aggEntity = aggregateBuffer(sendBuffer);
+ Optional.ofNullable(getSendLogExceptionStrategy()).ifPresent(
+ strategy -> strategy.doSend(() -> {
+ doSend(aggEntity, rpcSenderConfig);
+ return null;
+ }, sendBuffer));
+ }
+ };
+ Future<?> future = this.consumePool.submit(consumer);
+ consumer.setFuture(future);
+ sendLogCacheConsumers.add(consumer);
+ this.consumers++;
+ return true;
+ }
+ } finally {
+ this.ctxLock.unlock();
+ }
+ return false;
+ }
+
+ public SendLogCache<T> getLogCache(){
+ return this.logCache;
+ }
+
+ /**
+ * Destroy cache consumer(select the tail one)
+ */
+ public boolean destroyCacheConsumer(){
+ if (this.consumers <= 1){
+ return false;
+ }
+ this.ctxLock.lock();
+ try {
+ if (this.consumers > 1 && this.sendLogCacheConsumers.size() > 1) {
+ SendLogCacheConsumer consumer = sendLogCacheConsumers.removeLast();
+ consumer.shutdown();
+ this.consumers --;
+ return true;
+ }
+ } finally {
+ this.ctxLock.unlock();
+ }
+ return false;
+ }
+
+ /**
+ * Destroy all the consumers
+ */
+ public void destroyCacheConsumers(){
+ this.ctxLock.lock();
+ try {
+ sendLogCacheConsumers.forEach(SendLogCacheConsumer::shutdown);
+ sendLogCacheConsumers.clear();
+ this.consumers = 0;
+ } finally {
+ this.ctxLock.unlock();
+ }
+ }
+ }
+ /**
+ * Act as ArrayBlockingQueue (jdk 1.8)
+ */
+ private class QueuedSendLogCache implements SendLogCache<T>{
+
+ // Queued items
+ final Object[] items;
+
+ // Take index
+ int takeIndex;
+
+ // Put index
+ int putIndex;
+
+ // Count
+ int count;
+
+ // Wait time in caching
+ final AtomicLong cacheWaitTime = new AtomicLong(0);
+
+ // Wait time in taking
+ final AtomicLong takeWaitTime = new AtomicLong(0);
+
+ // Performance of processing
+ final AtomicLong process = new AtomicLong(0);
+
+ // Control flow
+ final AtomicLong control = new AtomicLong(Long.MAX_VALUE - 1);
+
+ // If enable to discard log
+ boolean discard;
+
+ int discardCount = 0;
+
+ // Time clock
+ long clock = System.currentTimeMillis();
+
+ // interval to control
+ long controlInterval = 1000L;
+
+ // Reentrant lock
+ final ReentrantLock lock;
+
+ // Condition for waiting takes
+ private final Condition notEmpty;
+
+ // Condition for waiting puts(cacheLog)
+ private final Condition notFull;
+
+ public QueuedSendLogCache(int capacity, boolean discard, int discardWind, boolean fair) {
+ this.items = new Object[capacity];
+ lock = new ReentrantLock(fair);
+ this.notEmpty = lock.newCondition();
+ this.notFull = lock.newCondition();
+ this.discard = discard;
+ // Make the discard window size as the control interval
+ this.controlInterval = discardWind;
+ this.clock = System.currentTimeMillis() + controlInterval;
+ }
+
+ @Override
+ public void cacheLog(T logElement) throws InterruptedException {
+ // Skip the null element
+ if (Objects.nonNull(logElement)){
+ final ReentrantLock lock = this.lock;
+ boolean tryLock = lock.tryLock();
+ if (!tryLock){
+ lock.lockInterruptibly();
+ }
+ try{
+ flowControl();
+ if (discard && control.decrementAndGet() <= 0 && logElement.mark() < 2){
+ discardCount++;
+ return;
+ }
+ while (count == items.length){
+ long ws = System.currentTimeMillis();
+ notFull.await();
+ cacheWaitTime.addAndGet(System.currentTimeMillis() - ws);
+ }
+ enqueue(logElement);
+ }finally{
+ lock.unlock();
+ }
+ }
+ }
+
+ @Override
+ public int drainLogsTo(List<T> elements, int maxElements) {
+ if (Objects.nonNull(elements) && maxElements > 0){
+ final Object[] items = this.items;
+ final ReentrantLock lock = this.lock;
+ lock.lock();
+ try{
+ int n = Math.min(maxElements, count);
+ int take = takeIndex;
+ int i = 0;
+ try {
+ while (i < n){
+ @SuppressWarnings("unchecked")
+ T x = (T) items[take];
+ elements.add(x);
+ items[take] = null;
+ if (++ take == items.length)
+ take = 0;
+ i++;
+ }
+ return n;
+ }finally {
+ restoreInvariants(i, take, false);
+ }
+ } finally {
+ lock.unlock();
+ }
+ }
+ return 0;
+ }
+
+ // Equal to the poll method in ArrayBlockingQueue
+ @Override
+ public T takeLog(long timeout, TimeUnit unit) throws InterruptedException {
+ long nanos = unit.toNanos(timeout);
+ final ReentrantLock lock = this.lock;
+ T element;
+ lock.lockInterruptibly();
+ try{
+ flowControl();
+ while (count == 0){
+ long ws = System.currentTimeMillis();
+ if (nanos <= 0){
+ return null;
+ }
+ nanos = notEmpty.awaitNanos(nanos);
+ takeWaitTime.addAndGet(System.currentTimeMillis() - ws);
+ }
+ element = dequeue();
+ process.incrementAndGet();
+ } finally {
+ lock.unlock();
+ }
+ return element;
+ }
+
+ @Override
+ public boolean isCacheable() {
+ final ReentrantLock lock = this.lock;
+ lock.lock();
+ try {
+ return count < items.length;
+ }finally {
+ lock.unlock();
+ }
+ }
+
+ // The same as the clear() method in ArrayBlockingQueue
+ @Override
+ public void destroy() {
+ final Object[] items = this.items;
+ final ReentrantLock lock = this.lock;
+ lock.lock();
+ try {
+ int k = count;
+ if (k > 0) {
+ final int putIndex = this.putIndex;
+ int i = takeIndex;
+ do {
+ items[i] = null;
+ if (++i == items.length)
+ i = 0;
+ } while (i != putIndex);
+ takeIndex = putIndex;
+ count = 0;
+ for (; k > 0 && lock.hasWaiters(notFull); k--)
+ notFull.signal();
+ }
+ } finally {
+ lock.unlock();
+ }
+ }
+
+ /**
+ * Drain the elements into send buffer
+ * @param sendBuffer send buffer
+ * @param maxElements max element size
+ * @return int
+ */
+ @Override
+ public int drainLogsTo(SendBuffer<T> sendBuffer, int maxElements) {
+ if (Objects.nonNull(sendBuffer) && maxElements > 0){
+ final Object[] items = this.items;
+ final ReentrantLock lock = this.lock;
+ lock.lock();
+ try{
+ flowControl();
+ int n = Math.min(maxElements, count);
+ int take = takeIndex;
+ int i = 0;
+ int send;
+ try {
+ while (n > 0) {
+ int len = items.length - take;
+ int send0 = Math.min(n, len);
+ // Copy the array element to buffer directly
+ send = sendBuf(sendBuffer, this.items, take, send0);
+ n -= send;
+ if ((take = take + send) >= items.length) {
+ take = 0;
+ }
+ i += send;
+ if (send < send0 || send <= 0) {
+ break;
+ }
+ }
+ process.addAndGet(i);
+ return i;
+ } finally {
+ if (i > 0){
+ restoreInvariants(i, take, true);
+ }
+ }
+ }finally {
+ lock.unlock();
+ }
+ }
+ return 0;
+ }
+
+ @SuppressWarnings("unchecked")
+ private int sendBuf(SendBuffer<T> sendBuffer, Object[] items, int takeIndex, int len){
+ int send = sendBuffer.writeBuf(items, takeIndex, len);
+ if (send < len){
+ // Buffer full exception
+ exceptionListener.onException(this, null, "The sender buffer is full," +
+ " expected: [" + len + "], actual: [" + send + "]");
+ }
+ // Allow data loss
+ return send;
+ }
+
+ private void restoreInvariants(int i, int take, boolean clearItems){
+ this.count -= i;
+ if (clearItems){
+ int index = this.takeIndex;
+ int j = i;
+ for (; j > 0; j --){
+ this.items[index] = null;
+ if (++index == items.length){
+ index = 0;
+ }
+ }
+ // At the end, index equals take
+ }
+ this.takeIndex = take;
+ for (; i > 0 && lock.hasWaiters(notFull); i--){
+ notFull.signal();
+ }
+ }
+ // Inserts element at current put position, advances, and signals. Call only when holding lock.
+ private void enqueue(T element){
+ this.items[putIndex] = element;
+ if (++putIndex >= items.length){
+ putIndex = 0;
+ }
+ count ++;
+ notEmpty.signal();
+ }
+
+ // Extracts element at current take position, advances, and signals. Call only when holding lock.
+ private T dequeue(){
+ @SuppressWarnings("unchecked")
+ T element = (T)this.items[takeIndex];
+ this.items[takeIndex] = null;
+ if ( ++ takeIndex == items.length){
+ this.takeIndex = 0;
+ }
+ count --;
+ // No need to support iterators
+ notFull.signal();
+ return element;
+ }
+
+ /**
+ * Flow control
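+ * Once per control window: if the consumers never waited for input
+ * (takeWaitTime <= 0) while elements were processed, cap the next window's
+ * cache quota at the observed processing rate; otherwise lift the cap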
+ */
+ private void flowControl(){
+ long ws = System.currentTimeMillis();
+ if (clock <= ws) {
+ long interval = ws - clock + controlInterval;
+ clock = ws + controlInterval;
+ if (rpcSenderConfig.isDebugMode()) {
+ System.out.println("cacheWait: " + cacheWaitTime.get() + ", takeWait:" + takeWaitTime.get() + ", discarded: " + discardCount);
+ }
+ if (takeWaitTime.get() <= 0 && process.get() > 0){
+ this.control.set((long) ((double)process.get() * ((double)controlInterval / (double)interval)));
+ if (rpcSenderConfig.isDebugMode()) {
+ System.out.println("new window control: " + this.control.get());
+ }
+ } else {
+ this.control.set(Long.MAX_VALUE);
+ }
+ cacheWaitTime.set(0);
+ takeWaitTime.set(0);
+ process.set(0);
+ discardCount = 0;
+ }
+ }
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/RpcLogSender.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/RpcLogSender.java
new file mode 100644
index 000000000..8254f0a34
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/RpcLogSender.java
@@ -0,0 +1,39 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.ExceptionListener;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.cache.LogCache;
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement;
+
+/**
+ * Rpc Log sender
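+ *
+ * <p>A minimal usage sketch (hypothetical caller code; the config object and
+ * values are assumed):
+ * <pre>{@code
+ *   RpcLogSender<StreamisLogEvent> sender =
+ *       new StreamisRpcLogSender("myApp", rpcSenderConfig);
+ *   sender.setExceptionListener((subject, t, message) -> System.err.println(message));
+ *   sender.sendLog(new StreamisLogEvent("hello streamis", System.currentTimeMillis()));
+ *   sender.close();
+ * }</pre>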
+ */
+public interface RpcLogSender<T extends LogElement> {
+
+ /**
+ * Get or create the log cache
+ * @return log cache
+ */
+ LogCache<T> getOrCreateLogCache();
+
+ /**
+ * Send log (async)
+ * @param log log element
+ */
+ void sendLog(T log);
+
+ /**
+ * Send log (sync)
+ * @param log log element
+ */
+ void syncSendLog(T log);
+
+ /**
+ * Exception listener
+ * @param listener listener
+ */
+ void setExceptionListener(ExceptionListener listener);
+ /**
+ * Close sender
+ */
+ void close();
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogCache.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogCache.java
new file mode 100644
index 000000000..56b627905
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogCache.java
@@ -0,0 +1,20 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.cache.LogCache;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.SendBuffer;
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement;
+
+/**
+ * Send log cache
+ * @param <T> log element type
+ */
+public interface SendLogCache<T extends LogElement> extends LogCache<T> {
+
+ /**
+ * Drain the logs into send buffer
+ * @param sendBuffer send buffer
+ * @param maxElements max element size
+ * @return count
+ */
+ int drainLogsTo(SendBuffer<T> sendBuffer, int maxElements);
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogCacheConsumer.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogCacheConsumer.java
new file mode 100644
index 000000000..9d68f408e
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogCacheConsumer.java
@@ -0,0 +1,132 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.SendBuffer;
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+
+/**
+ * Send log consumer
+ * Consume the log elements from cache and put into send buffer
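+ * <p>The buffer is flushed to the sender whenever it fills up or its expire
+ * time passes; otherwise cached logs are drained into it, blocking briefly
+ * when the cache is empty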
+ * @param <T> log element type
+ */
+public abstract class SendLogCacheConsumer<T extends LogElement> implements Runnable {
+
+
+ private boolean isTerminated = false;
+
+ /**
+ * Buffer expire time in milliseconds
+ */
+ private final long bufferExpireTimeInMills;
+ /**
+ * Send log cache
+ */
+ private final SendLogCache<T> cache;
+
+ /**
+ * Send buffer
+ */
+ private final SendBuffer<T> sendBuffer;
+
+ private final String id;
+
+ /**
+ * Future for execution
+ */
+ private Future<?> future;
+
+ protected SendLogCacheConsumer(String id, SendLogCache<T> cache,
+ SendBuffer<T> sendBuffer,
+ RpcLogSenderConfig rpcSenderConfig){
+ this.id = id;
+ this.cache = cache;
+ this.sendBuffer = sendBuffer;
+ long expireTimeInSec = rpcSenderConfig.getBufferConfig().getExpireTimeInSec();
+ this.bufferExpireTimeInMills = expireTimeInSec > 0 ? TimeUnit.SECONDS
+ .toMillis(expireTimeInSec) : -1;
+
+ }
+
+ @Override
+ public void run() {
+ int remain;
+ long expireTimeInMills = requireNewFlushTime();
+ int capacity = sendBuffer.capacity();
+ while (!this.isTerminated) {
+ try {
+ remain = this.sendBuffer.remaining();
+ if ((expireTimeInMills > 0 && expireTimeInMills <= System.currentTimeMillis()) || remain <= 0) {
+ // Transition to read mode
+ if (remain < capacity) {
+ sendBuffer.flip();
+ onFlushAndSend(sendBuffer);
+ }
+ expireTimeInMills = requireNewFlushTime();
+ if (sendBuffer.isReadMode()) {
+ // Clear the buffer and transition back to write mode, otherwise continue writing
+ sendBuffer.clear();
+ }
+ remain = this.sendBuffer.remaining();
+ }
+ if (remain > 0) {
+ int inBuf = this.cache.drainLogsTo(sendBuffer, remain);
+ if (inBuf < remain) {
+ // Means the cache is empty; take and wait for a log element
+ long waitTime = expireTimeInMills - System.currentTimeMillis();
+ if (waitTime > 0) {
+ T logElement = this.cache.takeLog(waitTime, TimeUnit.MILLISECONDS);
+ if (null != logElement) {
+ sendBuffer.writeBuf(logElement);
+ }
+ }
+ }
+ }
+ } catch (Exception e){
+ if (this.isTerminated && e instanceof InterruptedException){
+ return;
+ } else {
+ e.printStackTrace();
+ System.err.println("SendLogCacheConsumer[" + Thread.currentThread().getName() + "] occurred exception [" + e.getLocalizedMessage() + "]");
+ // For an unknown exception, clear the send buffer
+ sendBuffer.clear();
+ expireTimeInMills = requireNewFlushTime();
+ }
+ try {
+ Thread.sleep(500);
+ } catch (InterruptedException ex) {
+ ex.printStackTrace();
+ }
+ }
+ }
+ }
+
+ public void shutdown(){
+ this.isTerminated = true;
+ if (null != this.future){
+ this.future.cancel(true);
+ }
+ }
+
+ public Future<?> getFuture() {
+ return future;
+ }
+
+ public void setFuture(Future<?> future) {
+ this.future = future;
+ }
+
+ private long requireNewFlushTime(){
+ return bufferExpireTimeInMills > 0 ? System.currentTimeMillis() + bufferExpireTimeInMills : -1;
+ }
+ /**
+ * Invoked when the buffer is full or the flush time is reached
+ * @param sendBuffer send buffer
+ */
+ protected abstract void onFlushAndSend(SendBuffer<T> sendBuffer);
+
+ public String getId() {
+ return id;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogExceptionStrategy.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogExceptionStrategy.java
new file mode 100644
index 000000000..a547fb534
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/SendLogExceptionStrategy.java
@@ -0,0 +1,61 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.SendBuffer;
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement;
+
+import java.util.Objects;
+import java.util.concurrent.Callable;
+
+/**
+ * Strategy controlling the action taken on send exceptions
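+ *
+ * <p>A sketch of a concrete strategy (illustrative only; the sender variable
+ * is assumed):
+ * <pre>{@code
+ *   SendLogExceptionStrategy<StreamisLogEvent> retryTwice =
+ *       new SendLogExceptionStrategy<StreamisLogEvent>(sender) {
+ *           @Override
+ *           public int retryCount() { return 2; }
+ *           @Override
+ *           public RetryDescription onException(Exception e, SendBuffer<StreamisLogEvent> buffer) {
+ *               return new RetryDescription(e instanceof java.io.IOException);
+ *           }
+ *       };
+ * }</pre>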
+ */
+public abstract class SendLogExceptionStrategy<T extends LogElement> {
+
+ protected final RpcLogSender<T> sender;
+
+ protected SendLogExceptionStrategy(RpcLogSender<T> sender){
+ this.sender = sender;
+ }
+ /**
+ * Retry count
+ * @return retry
+ */
+ public abstract int retryCount();
+
+ /**
+ * Decide how to react to a send exception
+ * @param e exception
+ * @param sendBuffer send buffer being flushed
+ * @return retry description
+ */
+ public abstract RetryDescription onException(Exception e, SendBuffer<T> sendBuffer);
+
+ <V> V doSend(Callable<V> sendOperation, SendBuffer<T> sendBuffer){
+ int retryCount = retryCount();
+ int count = 0;
+ RetryDescription retryDescription;
+ while (++count <= retryCount) {
+ try {
+ return sendOperation.call();
+ } catch (Exception e) {
+ retryDescription = onException(e, sendBuffer);
+ if (Objects.isNull(retryDescription) || !retryDescription.canRetry) {
+ break;
+ }
+ }
+ }
+ return null;
+ }
+
+ protected static class RetryDescription{
+
+ private final boolean canRetry;
+
+ public RetryDescription(boolean canRetry){
+ this.canRetry = canRetry;
+ }
+
+ public boolean isCanRetry() {
+ return canRetry;
+ }
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/StreamisRpcLogSender.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/StreamisRpcLogSender.java
new file mode 100644
index 000000000..d019c29f5
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/StreamisRpcLogSender.java
@@ -0,0 +1,45 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender;
+
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.SendBuffer;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.AbstractHttpLogSender;
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvent;
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvents;
+
+/**
+ * Log sender for streamis
+ */
+public class StreamisRpcLogSender extends AbstractHttpLogSender<StreamisLogEvent, StreamisLogEvents> {
+
+ /**
+ * Each sender register an application
+ */
+ private final String applicationName;
+
+ public StreamisRpcLogSender(String applicationName, RpcLogSenderConfig rpcSenderConfig) {
+ super(rpcSenderConfig);
+ this.applicationName = applicationName;
+ }
+
+ /**
+ * Aggregate to streamis log events
+ * @param sendBuffer send buffer
+ * @return streamis log events, or null if the buffer is empty
+ */
+ @Override
+ protected StreamisLogEvents aggregateBuffer(SendBuffer<StreamisLogEvent> sendBuffer) {
+ int remain = sendBuffer.remaining();
+ if (remain > 0) {
+ StreamisLogEvent[] logEvents = new StreamisLogEvent[remain];
+ sendBuffer.readBuf(logEvents, 0, logEvents.length);
+ return new StreamisLogEvents(applicationName, logEvents);
+ }
+ return null;
+ }
+
+ @Override
+ protected String convertToJsonString(StreamisLogEvents aggregatedEntity) {
+ return aggregatedEntity.toJson();
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/AbstractSendBuffer.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/AbstractSendBuffer.java
new file mode 100644
index 000000000..5b11a4659
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/AbstractSendBuffer.java
@@ -0,0 +1,140 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf;
+
+import java.util.function.Predicate;
+
+/**
+ * Abstract sender buffer;
+ * non-blocking, and avoids out-of-bounds exceptions by clamping positions to the limit
+ */
+public abstract class AbstractSendBuffer<E> implements SendBuffer<E> {
+
+ @SuppressWarnings("unchecked")
+ public abstract SendBuffer<E> compact(Predicate<E> dropAble);
+
+ protected enum Flag{
+ WRITE_MODE, READ_MODE
+ }
+
+ /**
+ * Access flag
+ */
+ private Flag accessFlag = Flag.WRITE_MODE;
+
+ private int position = 0;
+ private int limit;
+ /**
+ * The capacity is mutable
+ */
+ protected int capacity;
+
+
+ protected AbstractSendBuffer(int capacity){
+ this.capacity = capacity;
+ limit(this.capacity);
+ }
+
+ protected AbstractSendBuffer(){
+ this(Integer.MAX_VALUE);
+ }
+
+ @Override
+ public boolean isReadMode() {
+ return accessFlag == Flag.READ_MODE;
+ }
+
+ @Override
+ public boolean isWriteMode() {
+ return accessFlag == Flag.WRITE_MODE;
+ }
+
+ @Override
+ public int capacity() {
+ return this.capacity;
+ }
+
+ @Override
+ public int remaining() {
+ int rem = this.limit - this.position;
+ return Math.max(rem, 0);
+ }
+
+ @Override
+ public void flip() {
+ checkFlag(Flag.WRITE_MODE);
+ this.limit = this.position;
+ this.position = 0;
+ this.accessFlag = Flag.READ_MODE;
+ }
+
+ @Override
+ public void rewind() {
+ position = 0;
+ }
+
+ @Override
+ public void clear() {
+ limit(this.capacity);
+ this.position = 0;
+ this.accessFlag = Flag.WRITE_MODE;
+ clearBuf();
+ }
+
+ /**
+ * Change the limit value
+ * @param newLimit new limit
+ */
+ final void limit(int newLimit){
+ if (newLimit > this.capacity || (newLimit < 0)){
+ throw new IllegalArgumentException("Set the illegal limit value: " + newLimit + " in send buffer, [capacity: " + this.capacity + "]");
+ }
+ this.limit = newLimit;
+ if (this.position > newLimit){
+ this.position = newLimit;
+ }
+ }
+
+ /**
+ * Advance the position by the given offset
+ * @param offset offset value
+ * @param accessFlag access flag
+ * @return the current position value
+ */
+ final int nextPosition(int offset, Flag accessFlag){
+ checkFlag(accessFlag);
+ int p = position;
+ // Reach the limit, return -1 value
+ if (p >= limit){
+ return -1;
+ }
+ if (p + offset > limit){
+ this.position = limit;
+ } else {
+ this.position = p + offset;
+ }
+ return p;
+ }
+
+ final void checkFlag(Flag accessFlag){
+ if (this.accessFlag != accessFlag){
+ throw new IllegalStateException("Illegal access flag [" + accessFlag + "] for send buffer");
+ }
+ }
+ final void setFlag(Flag accessFlag){
+ this.accessFlag = accessFlag;
+ }
+ /**
+ *
+ * @return the current position
+ */
+ final int position(){
+ return this.position;
+ }
+
+ final void position(int position){
+ this.position = position;
+ }
+ /**
+ * Do the actual clear
+ */
+ protected abstract void clearBuf();
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/ImmutableSendBuffer.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/ImmutableSendBuffer.java
new file mode 100644
index 000000000..734bc1490
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/ImmutableSendBuffer.java
@@ -0,0 +1,102 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf;
+
+import java.util.Arrays;
+import java.util.function.Predicate;
+
+/**
+ * Immutable send buffer (array-backed, fixed capacity)
+ */
+public class ImmutableSendBuffer<E> extends AbstractSendBuffer<E> {
+
+ /**
+ * Buffer object array
+ */
+ private final Object[] buf;
+
+ public ImmutableSendBuffer(int capacity) {
+ super(capacity);
+ buf = new Object[capacity];
+ }
+
+ @Override
+ protected void clearBuf() {
+ // Release the memory occupied
+ Arrays.fill(buf, null);
+ }
+
+ @Override
+ public void capacity(String newCapacity) {
+ throw new IllegalArgumentException("Unsupported to scale-in/scale-up the send buffer");
+ }
+
+ @Override
+ @SuppressWarnings("all")
+ public int writeBuf(Object[] elements, int srcIndex, int length) {
+ if (srcIndex < elements.length){
+ int startPos = nextPosition(Math.min(elements.length - srcIndex, length), Flag.WRITE_MODE);
+ if (startPos >= 0){
+ int writes = position() - startPos;
+ System.arraycopy(elements, srcIndex, this.buf, startPos, writes);
+ return writes;
+ }
+ }
+ return -1;
+ }
+
+ @Override
+ @SuppressWarnings("all")
+ public int readBuf(Object[] elements, int srcIndex, int length) {
+ if (srcIndex < elements.length){
+ int startPos = nextPosition(Math.min(elements.length - srcIndex, length), Flag.READ_MODE);
+ if (startPos >= 0){
+ int reads = position() - startPos;
+ System.arraycopy(this.buf, startPos, elements, srcIndex, reads);
+ return reads;
+ }
+ }
+ return -1;
+ }
+
+ @Override
+ public int writeBuf(E element) {
+ int startPos = nextPosition(1, Flag.WRITE_MODE);
+ if (startPos >= 0){
+ buf[startPos] = element;
+ return 1;
+ }
+ return -1;
+ }
+
+ @Override
+ @SuppressWarnings("unchecked")
+ public E readBuf() {
+ int startPos = nextPosition(1, Flag.READ_MODE);
+ if (startPos >= 0){
+ return (E)buf[startPos];
+ }
+ return null;
+ }
+
+ @Override
+ @SuppressWarnings("unchecked")
+ public SendBuffer<E> compact(Predicate<E> dropAble) {
+ checkFlag(Flag.READ_MODE);
+ int offset = 0;
+ int compact = position() - 1;
+ for(int i = position(); i < capacity; i ++){
+ Object element = buf[i];
+ if (dropAble.test((E) element)){
+ buf[i] = null;
+ offset ++;
+ } else {
+ compact = i - offset;
+ buf[compact] = element;
+ }
+ }
+ position(compact + 1);
+ limit(this.capacity);
+ setFlag(Flag.WRITE_MODE);
+ return this;
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/SendBuffer.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/SendBuffer.java
new file mode 100644
index 000000000..e67191996
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/buf/SendBuffer.java
@@ -0,0 +1,93 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf;
+
+
+import java.util.function.Predicate;
+
+/**
+ * Buffer for Rpc sender
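+ *
+ * <p>Follows a java.nio.Buffer style access pattern; a minimal sketch
+ * (using the array-backed ImmutableSendBuffer in this package):
+ * <pre>{@code
+ *   SendBuffer<String> buf = new ImmutableSendBuffer<>(16);
+ *   buf.writeBuf("a");           // write mode
+ *   buf.flip();                  // switch to read mode
+ *   String head = buf.readBuf(); // "a"
+ *   buf.clear();                 // discard and return to write mode
+ * }</pre>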
+ * @param <E> buffer element type
+ */
+public interface SendBuffer<E> {
+
+ /**
+ * Capacity
+ * @return int
+ */
+ int capacity();
+
+ /**
+ * Is read mode
+ * @return boolean
+ */
+ boolean isReadMode();
+
+ /**
+ * Is write mode
+ * @return boolean
+ */
+ boolean isWriteMode();
+ /**
+ * Scale-up or scale-in
+ * @param newCapacity new capacity
+ */
+ void capacity(String newCapacity);
+ /**
+ * Remaining size
+ * (remaining space for writing, or remaining elements for reading)
+ * @return int
+ */
+ int remaining();
+
+ /**
+ * Transition between write mode and read mode
+ */
+ void flip();
+
+ /**
+ * Restart from the beginning of the window
+ */
+ void rewind();
+ /**
+ * Clear to reuse the buffer
+ */
+ void clear();
+ /**
+ * Write buffer element
+ * @param element element
+ * @return 1 if written, otherwise -1
+ */
+ int writeBuf(E element);
+
+ /**
+ * Write buffer element array
+ * @param elements elements
+ * @param srcIndex the src index in elements
+ * @param length number of elements to copy
+ * @return write num
+ */
+ int writeBuf(Object[] elements, int srcIndex, int length);
+
+ /**
+ * Read buffer element
+ * @return element
+ */
+ E readBuf();
+
+ /**
+ * Read buffer element array
+ * @param elements elements
+ * @param srcIndex the src index in elements
+ * @param length the length to write
+ * @return read num
+ */
+ int readBuf(Object[] elements, int srcIndex, int length);
+
+ /**
+ * Compact the buffer, dropping useless elements
+ * @param dropAble drop function
+ * @return send buffer
+ */
+ SendBuffer<E> compact(Predicate<E> dropAble);
+
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/AbstractHttpLogSender.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/AbstractHttpLogSender.java
new file mode 100644
index 000000000..36baad8b9
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/AbstractHttpLogSender.java
@@ -0,0 +1,163 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcAuthConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.AbstractRpcLogSender;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.SendLogExceptionStrategy;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.buf.SendBuffer;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.request.StringPostAction;
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.HttpClient;
+import org.apache.http.client.HttpResponseException;
+import org.apache.http.client.methods.CloseableHttpResponse;
+import org.apache.http.conn.ConnectTimeoutException;
+
+import javax.net.ssl.SSLException;
+import java.io.*;
+import java.net.UnknownHostException;
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
+import java.util.Optional;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
+
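+/**
+ * Http implementation of the rpc log sender.
+ * Aggregates the buffered log elements into a single entity, posts it as JSON
+ * to the configured address, and backs off for serverRecoveryTimeInSec once
+ * the exception count exceeds the retry limit.
+ */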
+public abstract class AbstractHttpLogSender<T extends LogElement, E> extends AbstractRpcLogSender<T, E> {
+
+ /**
+ * Retry strategy
+ */
+ private final SendLogExceptionStrategy<T> sendRetryStrategy;
+
+ /**
+ * Exception counter
+ */
+ private final AtomicInteger exceptionCounter = new AtomicInteger();
+ /**
+ * Hold the global http client
+ */
+ private final HttpClient globalHttpClient;
+
+ /**
+ * Recover time point
+ */
+ private final AtomicLong serverRecoveryTimePoint = new AtomicLong(-1L);
+
+ protected AbstractHttpLogSender(RpcLogSenderConfig rpcSenderConfig) {
+ super(rpcSenderConfig);
+ this.globalHttpClient = HttpClientTool.createHttpClient(rpcSenderConfig);
+ this.sendRetryStrategy = new SendLogExceptionStrategy<T>(this) {
+
+ private final Class<?>[] retryOnExceptions = new Class<?>[]{
+ InterruptedIOException.class, UnknownHostException.class,
+ ConnectTimeoutException.class, SSLException.class};
+ @Override
+ public int retryCount() {
+ return rpcSenderConfig.getSendRetryCnt();
+ }
+
+ @Override
+ public SendLogExceptionStrategy.RetryDescription onException(Exception e, SendBuffer<T> sendBuffer) {
+ boolean shouldRetry = false;
+ // Limit of exception number is the same as the retry times
+ if (exceptionCounter.incrementAndGet() > retryCount()){
+ serverRecoveryTimePoint.set(System.currentTimeMillis() +
+ TimeUnit.SECONDS.toMillis(rpcSenderConfig.getServerRecoveryTimeInSec()));
+ } else {
+ for (Class<?> retryOnException : retryOnExceptions) {
+ if (retryOnException.equals(e.getClass())) {
+ shouldRetry = true;
+ break;
+ }
+ }
+ if (!shouldRetry && e instanceof HttpResponseException && ((HttpResponseException) e).getStatusCode() < 500){
+ shouldRetry = true;
+ }
+ }
+ if (shouldRetry && !sender.getOrCreateLogCache().isCacheable()){
+ // Means that the cache is full
+ // Set the position of buffer to 0
+ sendBuffer.rewind();
+ sendBuffer.compact( element -> element.mark() > 1);
+ shouldRetry = false;
+ }
+ Optional.ofNullable(exceptionListener).ifPresent(listener -> listener.onException(sender, e, null));
+ return new RetryDescription(shouldRetry);
+ }
+ };
+ }
+
+ @Override
+ protected SendLogExceptionStrategy<T> getSendLogExceptionStrategy() {
+ return this.sendRetryStrategy;
+ }
+
+ @Override
+ protected void doSend(E aggregatedEntity, RpcLogSenderConfig rpcSenderConfig) throws IOException {
+ if (System.currentTimeMillis() >= serverRecoveryTimePoint.get()) {
+ if (aggregatedEntity instanceof LogElement) {
+ long timestamp = ((LogElement) aggregatedEntity).getLogTimeStamp();
+ if (System.currentTimeMillis() - timestamp > rpcSenderConfig.getMaxDelayTimeInSec() * 1000L) {
+ // Abort the entity
+ return;
+ }
+ }
+ httpResponse(aggregatedEntity, rpcSenderConfig);
+ }
+ }
+
+ private void httpResponse(E aggregatedEntity, RpcLogSenderConfig rpcSenderConfig) throws IOException {
+ String address = rpcSenderConfig.getAddress();
+ if (null != address && !address.trim().equals("")) {
+ StringPostAction postAction = new StringPostAction(rpcSenderConfig.getAddress(), convertToJsonString(aggregatedEntity));
+ RpcAuthConfig authConfig = rpcSenderConfig.getAuthConfig();
+ postAction.getRequestHeaders().put(authConfig.getTokenUserKey(), authConfig.getTokenUser());
+ HttpResponse response = null;
+ try {
+ response = postAction.execute(this.globalHttpClient);
+ int statusCode = response.getStatusLine().getStatusCode();
+ if (statusCode > 200){
+ throw new HttpResponseException(statusCode,
+ convertToString(response.getEntity().getContent(), StandardCharsets.UTF_8));
+ }
+ }finally {
+ // Close the response and release the conn
+ if (null != response){
+ if (response instanceof CloseableHttpResponse){
+ ((CloseableHttpResponse)response).close();
+ } else {
+ // Destroy the stream
+ response.getEntity().getContent().close();
+ }
+ }
+ }
+ // Reset the exception counter after a successful send
+ this.exceptionCounter.set(0);
+ }
+ }
+ /**
+ * Convert input to string
+ * @param inputStream input stream
+ * @param charset charset
+ * @return string value
+ * @throws IOException
+ */
+ private String convertToString(InputStream inputStream, Charset charset) throws IOException {
+ StringBuilder builder = new StringBuilder();
+ try(BufferedReader reader = new BufferedReader(new InputStreamReader(inputStream, charset))){
+ String line;
+ while((line = reader.readLine()) != null){
+ builder.append(line);
+ }
+ }
+ return builder.toString();
+ }
+
+ /**
+ * Convert the entity to json
+ * @param aggregatedEntity aggregated entity
+ * @return json string
+ */
+ protected abstract String convertToJsonString(E aggregatedEntity);
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/HttpClientTool.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/HttpClientTool.java
new file mode 100644
index 000000000..7f62fc6fe
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/HttpClientTool.java
@@ -0,0 +1,71 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig;
+import org.apache.http.Header;
+import org.apache.http.client.HttpClient;
+import org.apache.http.client.config.RequestConfig;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.message.BasicHeader;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Http clients
+ */
+public class HttpClientTool {
+
+ private HttpClientTool() {}
+
+ /**
+ * Connect timeout
+ */
+ public static final int DEFAULT_CONNECT_TIMEOUT = 3000;
+
+ /**
+ * Socket timeout
+ */
+ public static final int DEFAULT_SOCKET_TIMEOUT = 15000;
+
+ /**
+ * Max connections
+ */
+ public static final int DEFAULT_MAX_CONN = 10;
+
+ /**
+ * Create http client
+ * @param rpcSenderConfig rpc sender config
+ * @return http client
+ */
+ public static HttpClient createHttpClient(RpcLogSenderConfig rpcSenderConfig){
+ int connectTimeout = rpcSenderConfig.getConnectionTimeout() > 0? rpcSenderConfig.getConnectionTimeout() : DEFAULT_CONNECT_TIMEOUT;
+ int socketTimeout = rpcSenderConfig.getSocketTimeout() > 0? rpcSenderConfig.getSocketTimeout() : DEFAULT_SOCKET_TIMEOUT;
+ RequestConfig requestConfig = RequestConfig.custom()
+ .setConnectTimeout(connectTimeout)
+ .setConnectionRequestTimeout(socketTimeout)
+ .setSocketTimeout(socketTimeout)
+ .build();
+ int maxConsumeThread = rpcSenderConfig.getCacheConfig().getMaxConsumeThread();
+ int maxConn = maxConsumeThread > 0? maxConsumeThread : DEFAULT_MAX_CONN;
+ HttpClientBuilder clientBuilder = HttpClients.custom();
+ String tokenValue = rpcSenderConfig.getAuthConfig().getTokenCode();
+ List<Header> defaultHeaders = new ArrayList<>();
+ if (null != tokenValue && !tokenValue.trim().equals("")){
+ defaultHeaders.add(new BasicHeader(rpcSenderConfig.getAuthConfig().getTokenCodeKey(), tokenValue));
+ }
+ clientBuilder.setDefaultRequestConfig(requestConfig).setDefaultHeaders(defaultHeaders)
+ .useSystemProperties().setMaxConnTotal(maxConn).setMaxConnPerRoute(maxConn);
+ CloseableHttpClient httpClient = clientBuilder.build();
+ Runtime.getRuntime().addShutdownHook(new Thread(() -> {
+ try {
+ httpClient.close();
+ } catch (IOException e) {
+ // Ignore
+ }
+ }));
+ return httpClient;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/entities/Resource.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/entities/Resource.java
new file mode 100644
index 000000000..eaa355e92
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/entities/Resource.java
@@ -0,0 +1,17 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.entities;
+
+import java.io.File;
+import java.util.List;
+
+/**
+ * Entity with resources
+ */
+public interface Resource {
+
+ /**
+ * Resources related
+ * @return file list
+ */
+ List<File> getResources();
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/AbstractHttpAction.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/AbstractHttpAction.java
new file mode 100644
index 000000000..b35a17d35
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/AbstractHttpAction.java
@@ -0,0 +1,63 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.request;
+
+import org.apache.http.HttpResponse;
+import org.apache.http.client.HttpClient;
+import org.apache.http.client.methods.HttpRequestBase;
+
+import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Abstract implementation
+ * @param <T> http request type
+ */
+public abstract class AbstractHttpAction<T extends HttpRequestBase> implements HttpAction {
+
+ protected String uri;
+
+ protected String user;
+
+ protected AbstractHttpAction(String uri){
+ this.uri = uri;
+ }
+
+ @Override
+ public String uri() {
+ return uri;
+ }
+
+ /**
+ * Request method
+ * @return method
+ */
+ protected abstract T getRequestMethod();
+
+ private Map<String, String> requestHeaders = new HashMap<>();
+
+ private Map<String, Object> requestPayload = new HashMap<>();
+
+ @Override
+ public Map<String, String> getRequestHeaders() {
+ return this.requestHeaders;
+ }
+
+ @Override
+ public Map<String, Object> getRequestPayload() {
+ return this.requestPayload;
+ }
+
+ @Override
+ public HttpResponse execute(HttpClient httpClient) throws IOException {
+ HttpRequestBase requestBase = getRequestMethod();
+ try{
+ requestBase.setURI(new URI(uri));
+ } catch (URISyntaxException e) {
+ throw new IllegalArgumentException("URI maybe has wrong format", e);
+ }
+ requestHeaders.forEach(requestBase::setHeader);
+ return httpClient.execute(requestBase);
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/HttpAction.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/HttpAction.java
new file mode 100644
index 000000000..87435f8a3
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/HttpAction.java
@@ -0,0 +1,38 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.request;
+
+import org.apache.http.HttpResponse;
+import org.apache.http.client.HttpClient;
+
+import java.io.IOException;
+import java.util.Map;
+
+/**
+ * Http action
+ */
+public interface HttpAction {
+
+ /**
+ * URI path
+ * @return path
+ */
+ String uri();
+
+ /**
+ * Request headers
+ * @return map
+ */
+ Map<String, String> getRequestHeaders();
+
+ /**
+ * Request payload (body)
+ * @return map
+ */
+ Map<String, Object> getRequestPayload();
+
+ /**
+ * Execute http action
+ * @return http response
+ */
+ HttpResponse execute(HttpClient httpClient) throws IOException;
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/StringPostAction.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/StringPostAction.java
new file mode 100644
index 000000000..6ce0d8cdf
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/sender/http/request/StringPostAction.java
@@ -0,0 +1,29 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.http.request;
+
+import org.apache.http.client.methods.HttpPost;
+import org.apache.http.entity.ContentType;
+import org.apache.http.entity.StringEntity;
+
+/**
+ * Use string to request
+ */
+public class StringPostAction extends AbstractHttpAction<HttpPost> {
+
+ /**
+ * Raw string value
+ */
+ private final String rawString;
+ public StringPostAction(String uri, String rawString) {
+ super(uri);
+ this.rawString = rawString;
+ }
+
+ @Override
+ protected HttpPost getRequestMethod() {
+ HttpPost httpPost = new HttpPost();
+ StringEntity stringEntity = new StringEntity(rawString, "UTF-8");
+ stringEntity.setContentType(ContentType.APPLICATION_JSON.toString());
+ httpPost.setEntity(stringEntity);
+ return httpPost;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/utils/StringUtils.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/utils/StringUtils.java
new file mode 100644
index 000000000..08002eb35
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/utils/StringUtils.java
@@ -0,0 +1,24 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.utils;
+
+
+/**
+ * Tool to operate str
+ */
+public class StringUtils {
+
+ private StringUtils(){}
+ /**
+ * Convert string to array
+ * @param input string
+ * @param delimiter delimiter
+ * @return array
+ */
+ public static String[] convertStrToArray(String input, String delimiter){
+ if (null != input && !input.trim().equals("") &&
+ !input.equals(delimiter.trim())){
+ return input.split(",");
+ }
+ return new String[0];
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/plugin/StreamisConfigAutowired.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/plugin/StreamisConfigAutowired.java
new file mode 100644
index 000000000..a0228a156
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector-core/src/main/java/com/webank/wedatasphere/streamis/jobmanager/plugin/StreamisConfigAutowired.java
@@ -0,0 +1,15 @@
+package com.webank.wedatasphere.streamis.jobmanager.plugin;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.StreamisLogAppenderConfig;
+
+/**
+ * Streamis config autowired
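+ * <p>Implementations are discovered via Java SPI (java.util.ServiceLoader)
+ * when the appender is created, and may adjust the builder before it builds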
+ */
+public interface StreamisConfigAutowired {
+
+ /**
+ * Build the log appender config
+ * @param builder builder
+ * @return log appender config
+ */
+ StreamisLogAppenderConfig logAppenderConfig(StreamisLogAppenderConfig.Builder builder) throws IllegalAccessException;
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/pom.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/pom.xml
new file mode 100644
index 000000000..3ab465c14
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/pom.xml
@@ -0,0 +1,64 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>streamis-job-log</artifactId>
+        <groupId>com.webank.wedatasphere.streamis</groupId>
+        <version>0.3.0</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>streamis-job-log-collector</artifactId>
+
+    <properties>
+        <maven.compiler.source>8</maven.compiler.source>
+        <maven.compiler.target>8</maven.compiler.target>
+        <log4j.version>2.17.1</log4j.version>
+        <slf4j.version>1.7.15</slf4j.version>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.webank.wedatasphere.streamis</groupId>
+            <artifactId>streamis-job-log-collector-core</artifactId>
+            <version>${streamis.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+            <version>${slf4j.version}</version>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.logging.log4j</groupId>
+            <artifactId>log4j-slf4j-impl</artifactId>
+            <version>${log4j.version}</version>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.logging.log4j</groupId>
+            <artifactId>log4j-api</artifactId>
+            <version>${log4j.version}</version>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.logging.log4j</groupId>
+            <artifactId>log4j-core</artifactId>
+            <version>${log4j.version}</version>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>${junit.version}</version>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+</project>
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisRpcLogAppender.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisRpcLogAppender.java
new file mode 100644
index 000000000..841d055a8
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisRpcLogAppender.java
@@ -0,0 +1,125 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.cache.LogCache;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.StreamisLogAppenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.StreamisLog4j2AppenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilter;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.StreamisRpcLogSender;
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvent;
+import com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired;
+import org.apache.logging.log4j.core.Filter;
+import org.apache.logging.log4j.core.Layout;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.appender.AbstractAppender;
+import org.apache.logging.log4j.core.config.Property;
+import org.apache.logging.log4j.core.config.plugins.Plugin;
+import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
+import org.apache.logging.log4j.core.config.plugins.PluginElement;
+import org.apache.logging.log4j.core.config.plugins.PluginFactory;
+import org.apache.logging.log4j.core.layout.PatternLayout;
+
+import java.io.Serializable;
+import java.nio.charset.StandardCharsets;
+import java.util.*;
+import java.util.function.BiPredicate;
+
+/**
+ * Streamis rpc log appender
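+ *
+ * <p>A log4j2.xml wiring sketch (element and attribute names come from the
+ * plugin definitions in this module; the address, application name and token
+ * values are placeholders):
+ * <pre>{@code
+ * <Appenders>
+ *   <StreamRpcLog name="streamRpcLog" appName="myApp">
+ *     <RpcLogSender address="http://streamis-server:port/..." sendRetryCnt="3">
+ *       <SendBuffer size="50" expireTimeInSec="2"/>
+ *       <AuthConfig tokenCodeKey="Token-Code" tokenCode="..." tokenUserKey="Token-User" tokenUser="hadoop"/>
+ *     </RpcLogSender>
+ *   </StreamRpcLog>
+ * </Appenders>
+ * }</pre>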
+ */
+@Plugin(name = "StreamRpcLog", category = "Core", elementType = "appender", printObject = true)
+public class StreamisRpcLogAppender extends AbstractAppender {
+
+ private static final String DEFAULT_APPENDER_NAME = "StreamRpcLog";
+ /**
+ * Appender config
+ */
+ private final StreamisLogAppenderConfig appenderConfig;
+
+ /**
+ * Rpc log sender
+ */
+ private final StreamisRpcLogSender rpcLogSender;
+
+ /**
+ * Cache
+ */
+ private final LogCache<StreamisLogEvent> logCache;
+
+ /**
+ * Filter function
+ */
+ private BiPredicate<String, String> messageFilterFunction = (logger, message) -> true;
+
+ protected StreamisRpcLogAppender(String name, Filter filter,
+ Layout<? extends Serializable> layout,
+ boolean ignoreExceptions, Property[] properties,
+ StreamisLogAppenderConfig appenderConfig) {
+ super(name, filter, layout, ignoreExceptions, properties);
+ this.appenderConfig = appenderConfig;
+ this.rpcLogSender = new StreamisRpcLogSender(this.appenderConfig.getApplicationName(),
+ this.appenderConfig.getSenderConfig());
+ this.rpcLogSender.setExceptionListener((subject, t, message) ->
+ LOGGER.error((null != subject? subject.getClass().getSimpleName() : "") + ": " + message, t));
+ this.logCache = this.rpcLogSender.getOrCreateLogCache();
+ List<LogMessageFilter> messageFilters = appenderConfig.getMessageFilters();
+ if (null != messageFilters && messageFilters.size() > 0){
+ messageFilterFunction = (logger, message) ->{
+ for(LogMessageFilter messageFilter : messageFilters){
+ if (!messageFilter.doFilter(logger, message)){
+ return false;
+ }
+ }
+ return true;
+ };
+ }
+ Runtime.getRuntime().addShutdownHook(new Thread(this.rpcLogSender::close));
+ }
+
+ @Override
+ public void append(LogEvent event) {
+ // Render the event through the layout; Arrays.toString on the byte array would produce "[1, 2, ...]" instead of text
+ String content = new String(getLayout().toByteArray(event), StandardCharsets.UTF_8);
+ if (messageFilterFunction.test(event.getLoggerName(), content)) {
+ StreamisLogEvent logEvent = new StreamisLogEvent(content, event.getTimeMillis());
+ try {
+ this.logCache.cacheLog(logEvent);
+ } catch (InterruptedException e) {
+ LOGGER.error("StreamisRpcLogAppender: {} interrupted when cache the log into the RPC sender, message: {}", this.getName(), e.getMessage());
+
+ }
+ }
+ }
+
+ @PluginFactory
+ public static StreamisRpcLogAppender createAppender(@PluginAttribute("name") String name,
+ @PluginAttribute("appName") String applicationName,
+ @PluginAttribute("ignoreExceptions") boolean ignoreExceptions,
+ @PluginElement("Filter") final Filter filter,
+ @PluginElement("Layout") Layout extends Serializable> layout,
+ @PluginElement("RpcLogSender")RpcLogSenderConfig rpcLogSenderConfig) throws IllegalAccessException {
+ if (null == name || name.trim().equals("")){
+ name = DEFAULT_APPENDER_NAME;
+ }
+ if (Objects.isNull(layout)){
+ layout = PatternLayout.createDefaultLayout();
+ }
+ // Search the config autowired class
+ List<StreamisConfigAutowired> configAutowiredEntities = new ArrayList<>();
+ StreamisLog4j2AppenderConfig logAppenderConfig = null;
+ ServiceLoader.load(StreamisConfigAutowired.class,
+ StreamisRpcLogAppender.class.getClassLoader()).iterator().forEachRemaining(configAutowiredEntities::add);
+ StreamisLog4j2AppenderConfig.Builder builder = new StreamisLog4j2AppenderConfig.Builder(applicationName, filter, rpcLogSenderConfig);
+ for (StreamisConfigAutowired autowired : configAutowiredEntities){
+ logAppenderConfig = (StreamisLog4j2AppenderConfig) autowired.logAppenderConfig(builder);
+ }
+ if (Objects.isNull(logAppenderConfig)){
+ logAppenderConfig = builder.build();
+ }
+ applicationName = logAppenderConfig.getApplicationName();
+ if (null == applicationName || applicationName.trim().equals("")){
+ throw new IllegalArgumentException("Application name cannot be empty");
+ }
+ System.out.println("StreamisRpcLogAppender: init with config => " + logAppenderConfig);
+ return new StreamisRpcLogAppender(name, logAppenderConfig.getFilter(), layout, ignoreExceptions, Property.EMPTY_ARRAY, logAppenderConfig);
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/StreamisLog4j2AppenderConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/StreamisLog4j2AppenderConfig.java
new file mode 100644
index 000000000..adf7dfe06
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/StreamisLog4j2AppenderConfig.java
@@ -0,0 +1,97 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.StreamisLogAppenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilter;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilterAdapter;
+import org.apache.logging.log4j.core.Filter;
+import org.apache.logging.log4j.core.filter.CompositeFilter;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * Appender config for log4j2
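+ *
+ * <p>Typically assembled through the nested builder; a sketch (senderConfig
+ * and someFilter are assumed to exist):
+ * <pre>{@code
+ *   StreamisLog4j2AppenderConfig config =
+ *       new StreamisLog4j2AppenderConfig.Builder("myApp", null, senderConfig)
+ *           .withFilter(someFilter)
+ *           .build();
+ * }</pre>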
+ */
+public class StreamisLog4j2AppenderConfig extends StreamisLogAppenderConfig {
+ /**
+ * Filter in log4j2
+ */
+ private final Filter filter;
+
+ public StreamisLog4j2AppenderConfig(String applicationName, Filter filter,
+ RpcLogSenderConfig rpcLogSenderConfig, List<LogMessageFilter> messageFilters){
+ super(applicationName, rpcLogSenderConfig, messageFilters);
+ this.filter = filter;
+ }
+
+ public static class Builder extends StreamisLogAppenderConfig.Builder {
+
+ /**
+ * Filter rules
+ */
+ private final List<Filter> filters = new ArrayList<>();
+
+ public Builder(String applicationName, Filter filter, RpcLogSenderConfig rpcLogSenderConfig) {
+ super(applicationName, rpcLogSenderConfig);
+ if (Objects.nonNull(filter)) {
+ this.filters.add(filter);
+ }
+ }
+
+ /**
+ * Set filter
+ * @param filter filter
+ * @return builder
+ */
+ public StreamisLog4j2AppenderConfig.Builder setFilter(Filter filter){
+ this.filters.clear();
+ this.messageFilters.clear();
+ this.filters.add(filter);
+ if (filter instanceof LogMessageFilterAdapter){
+ this.messageFilters.add(((LogMessageFilterAdapter) filter).getLogMessageFilter());
+ }
+ return this;
+ }
+
+ /**
+ * Append filter
+ * @param filter filter
+ * @return builder
+ */
+ public StreamisLog4j2AppenderConfig.Builder withFilter(Filter filter){
+ filters.add(filter);
+ if (filter instanceof LogMessageFilterAdapter){
+ this.messageFilters.add(((LogMessageFilterAdapter) filter).getLogMessageFilter());
+ }
+ return this;
+ }
+
+ /**
+ * Build method
+ * @return config
+ */
+ public StreamisLog4j2AppenderConfig build(){
+ Filter logFilter = null;
+ if (filters.size() > 1){
+ logFilter = CompositeFilter.createFilters(filters.toArray(new Filter[0]));
+ } else if (!filters.isEmpty()){
+ logFilter = filters.get(0);
+ }
+ return new StreamisLog4j2AppenderConfig(applicationName, logFilter, rpcLogSenderConfig, messageFilters);
+ }
+ }
+ public Filter getFilter() {
+ return filter;
+ }
+
+ @Override
+ public String toString() {
+ return "StreamisLog4j2AppenderConfig{" +
+ "applicationName='" + applicationName + '\'' +
+ ", senderConfig=" + senderConfig +
+ ", filter=" + filter +
+ '}';
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/RpcAuthConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/RpcAuthConfig.java
new file mode 100644
index 000000000..87a10ba85
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/RpcAuthConfig.java
@@ -0,0 +1,31 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.config;
+
+import org.apache.logging.log4j.core.config.plugins.Plugin;
+import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
+import org.apache.logging.log4j.core.config.plugins.PluginFactory;
+
+/**
+ * AuthConfig Element in log4j2
+ */
+@Plugin(
+ name = "AuthConfig",
+ category = "Core",
+ printObject = true
+)
+public class RpcAuthConfig extends com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcAuthConfig {
+
+ public RpcAuthConfig(){
+ super();
+ }
+ public RpcAuthConfig(String tokenCodeKey, String tokenCode, String tokenUserKey, String tokenUser) {
+ super(tokenCodeKey, tokenCode, tokenUserKey, tokenUser);
+ }
+
+ @PluginFactory
+ public static RpcAuthConfig createRpcAuthConfig(@PluginAttribute("tokenCodeKey") String tokenCodeKey,
+ @PluginAttribute("tokenCode") String tokenCode,
+ @PluginAttribute("tokenUserKey") String tokenUserKey, @PluginAttribute("tokenUser") String tokenUser){
+ return new RpcAuthConfig(tokenCodeKey, tokenCode, tokenUserKey, tokenUser);
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/RpcLogSenderConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/RpcLogSenderConfig.java
new file mode 100644
index 000000000..f9dff1d10
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/RpcLogSenderConfig.java
@@ -0,0 +1,40 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.config;
+
+import org.apache.logging.log4j.core.config.plugins.Plugin;
+import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
+import org.apache.logging.log4j.core.config.plugins.PluginElement;
+import org.apache.logging.log4j.core.config.plugins.PluginFactory;
+import org.apache.logging.log4j.core.util.Integers;
+
+/**
+ * Rpc sender configuration
+ */
+@Plugin(
+ name = "RpcLogSender",
+ category = "Core",
+ printObject = true
+)
+public class RpcLogSenderConfig extends com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig {
+
+ public RpcLogSenderConfig(String address, int sendRetryCnt, int connectionTimeout, int socketTimeout, int serverRecoveryTimeInSec, int maxDelayTimeInSec,
+ RpcAuthConfig authConfig, SendLogCacheConfig cacheConfig, SendBufferConfig bufferConfig) {
+ super(address, sendRetryCnt, connectionTimeout, socketTimeout, serverRecoveryTimeInSec, maxDelayTimeInSec, authConfig, cacheConfig, bufferConfig);
+ }
+
+ @PluginFactory
+ public static RpcLogSenderConfig createConfig(
+ @PluginAttribute("address") String address, @PluginAttribute("sendRetryCnt") String sendRetryCnt,
+ @PluginAttribute("connectionTimeout") String connectionTimeout, @PluginAttribute("socketTimeout") String socketTimeout,
+ @PluginAttribute("serverRecoveryTimeInSec") String serverRecoveryTimeInSec, @PluginAttribute("maxDelayTimeInSec") String maxDelayTimeInSec,
+ @PluginAttribute("debugMode")String debugMode,
+ @PluginElement("AuthConfig")RpcAuthConfig authConfig, @PluginElement("SendLogCache") SendLogCacheConfig cacheConfig,
+ @PluginElement("SendBuffer")SendBufferConfig bufferConfig){
+ RpcLogSenderConfig config = new RpcLogSenderConfig(address, Integers.parseInt(sendRetryCnt, 3),
+ Integers.parseInt(connectionTimeout, 3000), Integers.parseInt(socketTimeout, 15000),
+ Integers.parseInt(serverRecoveryTimeInSec, 5), Integers.parseInt(maxDelayTimeInSec, 60),
+ authConfig, cacheConfig, bufferConfig);
+ config.setDebugMode(Boolean.parseBoolean(debugMode));
+ return config;
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/SendBufferConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/SendBufferConfig.java
new file mode 100644
index 000000000..936accd72
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/SendBufferConfig.java
@@ -0,0 +1,28 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.config;
+
+import org.apache.logging.log4j.core.config.plugins.Plugin;
+import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
+import org.apache.logging.log4j.core.config.plugins.PluginFactory;
+import org.apache.logging.log4j.core.util.Integers;
+
+@Plugin(
+ name = "SendBuffer",
+ category = "Core",
+ printObject = true
+)
+public class SendBufferConfig extends com.webank.wedatasphere.streamis.jobmanager.log.collector.config.SendBufferConfig {
+
+ public SendBufferConfig() {
+ }
+
+ public SendBufferConfig(int size, long expireTimeInSec) {
+ super(size, expireTimeInSec);
+ }
+
+ @PluginFactory
+ public static SendBufferConfig createBufferConfig(
+ @PluginAttribute("size") String size, @PluginAttribute("expireTimeInSec") String expireTimeInSec){
+ return new SendBufferConfig(Integers.parseInt(size, 50),
+ Integers.parseInt(expireTimeInSec, 2));
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/SendLogCacheConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/SendLogCacheConfig.java
new file mode 100644
index 000000000..f4a63c49c
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/config/SendLogCacheConfig.java
@@ -0,0 +1,27 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.config;
+
+import org.apache.logging.log4j.core.config.plugins.Plugin;
+import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
+import org.apache.logging.log4j.core.config.plugins.PluginFactory;
+import org.apache.logging.log4j.core.util.Integers;
+
+/**
+ * Cache config
+ */
+@Plugin(
+ name = "SendLogCache",
+ category = "Core",
+ printObject = true
+)
+public class SendLogCacheConfig extends com.webank.wedatasphere.streamis.jobmanager.log.collector.config.SendLogCacheConfig {
+
+ public SendLogCacheConfig(int size, int maxConsumeThread) {
+ super(size, maxConsumeThread);
+ }
+
+ @PluginFactory
+ public static SendLogCacheConfig createCacheConfig(
+ @PluginAttribute("size") String size, @PluginAttribute("maxConsumeThread") String maxConsumeThread){
+ return new SendLogCacheConfig(Integers.parseInt(size, 150), Integers.parseInt(maxConsumeThread, 10));
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/filters/KeywordThresholdFilter.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/filters/KeywordThresholdFilter.java
new file mode 100644
index 000000000..ea51fd67a
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j2/filters/KeywordThresholdFilter.java
@@ -0,0 +1,86 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j2.filters;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.KeywordMessageFilter;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilter;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilterAdapter;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.Marker;
+import org.apache.logging.log4j.core.Filter;
+import org.apache.logging.log4j.core.LogEvent;
+import org.apache.logging.log4j.core.Logger;
+import org.apache.logging.log4j.core.filter.AbstractFilter;
+import org.apache.logging.log4j.message.Message;
+import org.slf4j.LoggerFactory;
+
+import java.util.Optional;
+
+/**
+ * Threshold filter with keyword
+ */
+public class KeywordThresholdFilter extends AbstractFilter implements LogMessageFilterAdapter {
+
+ private static final org.slf4j.Logger logger = LoggerFactory.getLogger(KeywordThresholdFilter.class);
+ /**
+ * Level
+ */
+ private final Level level;
+
+ /**
+ * Message filter
+ */
+ private final KeywordMessageFilter messageFilter;
+ public KeywordThresholdFilter(String[] acceptKeywords, String[] excludeKeywords){
+ // Use accept and deny match
+ super(Filter.Result.ACCEPT, Filter.Result.DENY);
+        // If the accept keywords are empty, raise the log threshold to WARN
+ if (null == acceptKeywords || acceptKeywords.length <= 0){
+ this.level = Level.WARN;
+ logger.info("The keywords is empty, set the log threshold level >= " + this.level);
+ } else {
+ this.level = Level.ALL;
+ }
+ this.messageFilter = new KeywordMessageFilter(acceptKeywords, excludeKeywords);
+ }
+
+ @Override
+ public Result filter(LogEvent event) {
+ return filter(event.getLevel());
+ }
+
+ @Override
+ public Result filter(Logger logger, Level level, Marker marker, Message msg, Throwable t) {
+ return filter(level);
+ }
+
+ @Override
+ public Result filter(Logger logger, Level level, Marker marker, Object msg, Throwable t) {
+ return filter(level);
+ }
+
+ @Override
+ public Result filter(Logger logger, Level level, Marker marker, String msg, Object... params) {
+ return filter(level);
+ }
+
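+    // Accept events at or above the threshold level; deny everything else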
+ private Result filter(final Level level){
+ return level.isMoreSpecificThan(this.level) ? onMatch : onMismatch;
+ }
+
+ public Level getLevel() {
+ return level;
+ }
+
+ @Override
+ public String toString() {
+ return level.toString() +
+ "|acceptKeywords:[" +
+ Optional.ofNullable(this.messageFilter.getAcceptKeywords()).orElse(new String[]{}).length +
+ "]|excludeKeywords:[" +
+ Optional.ofNullable(this.messageFilter.getExcludeKeywords()).orElse(new String[]{}).length + "]" ;
+ }
+
+ @Override
+ public LogMessageFilter getLogMessageFilter() {
+ return this.messageFilter;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisLogAppenderTest.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisLogAppenderTest.java
new file mode 100644
index 000000000..0bc49c139
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisLogAppenderTest.java
@@ -0,0 +1,29 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector;
+
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class StreamisLogAppenderTest {
+ private static final Logger LOG = LoggerFactory.getLogger(StreamisLogAppenderTest.class);
+ @Test
+ public void appenderLog() throws InterruptedException {
+ int total = 10000;
+ int tps = 1000;
+ long timer = System.currentTimeMillis() + 1000;
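+        // Pace the loop so that roughly 'tps' events are emitted per second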
+ for (int i = 0; i < total; i++) {
+ if (i > 0 && i % tps == 0) {
+ long sleep = timer - System.currentTimeMillis();
+ if (sleep > 0) {
+ try {
+ Thread.sleep(sleep);
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+ timer = System.currentTimeMillis() + 1000;
+ }
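+            // Include "ERROR" in the message so it passes keyword filters whose default accept keyword is "ERROR"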
+ LOG.info("ERROR: Stream Log appender test, sequence id: " + i);
+ }
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/test/resources/log4j2.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/test/resources/log4j2.xml
new file mode 100644
index 000000000..27aff1d6d
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector/src/test/resources/log4j2.xml
@@ -0,0 +1,37 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- NOTE: the Streamis appender element name and the address value below are assumptions;
+     the nested RpcLogSender, AuthConfig, SendLogCache and SendBuffer elements match
+     the log4j2 plugins defined in this module -->
+<Configuration status="warn">
+    <Appenders>
+        <Console name="Console" target="SYSTEM_OUT">
+            <PatternLayout pattern="%d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n"/>
+        </Console>
+        <StreamisRpcLog name="StreamisRpcLog" appName="stream_application">
+            <PatternLayout pattern="%d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n"/>
+            <RpcLogSender address="http://127.0.0.1:9304" sendRetryCnt="3" connectionTimeout="3000"
+                          socketTimeout="15000" serverRecoveryTimeInSec="5" maxDelayTimeInSec="60">
+                <AuthConfig tokenCodeKey="" tokenCode="" tokenUserKey="" tokenUser=""/>
+                <SendLogCache size="150" maxConsumeThread="10"/>
+                <SendBuffer size="50" expireTimeInSec="2"/>
+            </RpcLogSender>
+        </StreamisRpcLog>
+    </Appenders>
+    <Loggers>
+        <Root level="INFO">
+            <AppenderRef ref="Console"/>
+            <AppenderRef ref="StreamisRpcLog"/>
+        </Root>
+    </Loggers>
+</Configuration>
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/pom.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/pom.xml
new file mode 100644
index 000000000..ae0b2fda7
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/pom.xml
@@ -0,0 +1,56 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>streamis-job-log</artifactId>
+        <groupId>com.webank.wedatasphere.streamis</groupId>
+        <version>0.3.0</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>streamis-job-log-collector1x</artifactId>
+
+    <properties>
+        <maven.compiler.source>8</maven.compiler.source>
+        <maven.compiler.target>8</maven.compiler.target>
+        <log4j.version>1.2.17</log4j.version>
+        <slf4j.version>1.7.12</slf4j.version>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.webank.wedatasphere.streamis</groupId>
+            <artifactId>streamis-job-log-collector-core</artifactId>
+            <version>${streamis.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+            <version>${slf4j.version}</version>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+            <version>${slf4j.version}</version>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+            <version>${log4j.version}</version>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>${junit.version}</version>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+</project>
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisRpcLogAppender.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisRpcLogAppender.java
new file mode 100644
index 000000000..e4b93d25b
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisRpcLogAppender.java
@@ -0,0 +1,237 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.cache.LogCache;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j1.StreamisLog4jAppenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilter;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.sender.StreamisRpcLogSender;
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvent;
+import com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired;
+import org.apache.log4j.AppenderSkeleton;
+import org.apache.log4j.Level;
+import org.apache.log4j.SimpleLayout;
+import org.apache.log4j.helpers.LogLog;
+import org.apache.log4j.spi.LoggingEvent;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.*;
+import java.util.function.BiPredicate;
+
+/**
+ * Rpc appender for log4j1
+ */
+public class StreamisRpcLogAppender extends AppenderSkeleton {
+
+ /**
+ * Application name
+ */
+ private String applicationName;
+
+ private String filterEnable = "true";
+ /**
+ * Appender config
+ */
+ private StreamisLog4jAppenderConfig appenderConfig;
+
+ /**
+ * Rpc log sender
+ */
+ private StreamisRpcLogSender rpcLogSender;
+
+ /**
+ * Rpc log sender config
+ */
+ private RpcLogSenderConfig rpcLogSenderConfig = new RpcLogSenderConfig();
+
+
+ /**
+ * Cache
+ */
+ private LogCache logCache;
+
+    /**
+     * Filter function: decides whether a formatted message should be forwarded
+     */
+    private BiPredicate<String, String> messageFilterFunction = (logger, message) -> true;
+
+ @Override
+ protected void append(LoggingEvent loggingEvent) {
+ String content = super.getLayout().format(loggingEvent);
+ if (messageFilterFunction.test(loggingEvent.getLoggerName(), content)) {
+ StreamisLogEvent logEvent = new StreamisLogEvent(content, loggingEvent.getTimeStamp());
+ if (Objects.nonNull(logCache)) {
+ try {
+ this.logCache.cacheLog(logEvent);
+ } catch (InterruptedException e) {
+ LogLog.error("StreamisRpcLogAppender: " + this.getName() +
+ " interrupted when cache the log into the RPC sender, message: " + e.getMessage());
+ }
+ }
+ }
+ }
+
+ @Override
+ public void close() {
+ if (Objects.nonNull(this.rpcLogSender)){
+ this.rpcLogSender.close();
+ }
+ }
+
+ @Override
+ public boolean requiresLayout() {
+ return true;
+ }
+
+ @Override
+ public void activateOptions() {
+ check();
+ // Search the config autowired class
+        List<StreamisConfigAutowired> configAutowiredEntities = new ArrayList<>();
+ StreamisLog4jAppenderConfig logAppenderConfig = null;
+ ServiceLoader.load(StreamisConfigAutowired.class,
+ StreamisRpcLogAppender.class.getClassLoader()).iterator().forEachRemaining(configAutowiredEntities::add);
+ StreamisLog4jAppenderConfig.Builder builder = new StreamisLog4jAppenderConfig.Builder(this.applicationName,
+ getThreshold(), getFilter(), rpcLogSenderConfig);
+ for (StreamisConfigAutowired autowired : configAutowiredEntities){
+ try {
+ logAppenderConfig = (StreamisLog4jAppenderConfig) autowired.logAppenderConfig(builder);
+ } catch (Exception e) {
+ LogLog.warn("Unable to autowired the config from: " +autowired.getClass().getName(), e);
+ }
+ }
+ if (Objects.isNull(logAppenderConfig)){
+ logAppenderConfig = builder.build();
+ }
+ this.applicationName = logAppenderConfig.getApplicationName();
+ if (null == applicationName || applicationName.trim().equals("")){
+ throw new IllegalArgumentException("Application name cannot be empty");
+ }
+ this.appenderConfig = logAppenderConfig;
+ // Set the threshold to error default
+ setThreshold(Optional.ofNullable(logAppenderConfig.getThreshold()).orElse(Level.ERROR));
+ // First to clear the filters
+ clearFilters();
+ // Then to add filter
+ logAppenderConfig.getFilters().forEach(this::addFilter);
+ System.out.println("StreamisRpcLogAppender: init with config => " + logAppenderConfig);
+ this.rpcLogSender = new StreamisRpcLogSender(this.appenderConfig.getApplicationName(),
+ this.appenderConfig.getSenderConfig());
+ this.rpcLogSender.setExceptionListener((subject, t, message) ->
+ LogLog.error((null != subject? subject.getClass().getSimpleName() : "") + ": " + message, t));
+ this.logCache = this.rpcLogSender.getOrCreateLogCache();
+        List<LogMessageFilter> messageFilters = appenderConfig.getMessageFilters();
+        if (null != messageFilters && !messageFilters.isEmpty()){
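+            // Compose the filters: forward a message only when every configured filter accepts it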
+ messageFilterFunction = (logger, message) ->{
+ for(LogMessageFilter messageFilter : messageFilters){
+ if (!messageFilter.doFilter(logger, message)){
+ return false;
+ }
+ }
+ return true;
+ };
+ }
+ }
+
+ private void check(){
+ if (Objects.nonNull(this.logCache)){
+ return;
+ }
+ if (Objects.isNull(getLayout())){
+ setLayout(new SimpleLayout());
+ }
+ if (System.getProperty("filter.enable") == null){
+ System.setProperty("filter.enable", filterEnable);
+ }
+ }
+
+
+ public String getAppName() {
+ return applicationName;
+ }
+
+ /**
+ * Application name
+ * @param applicationName name
+ */
+ public void setAppName(String applicationName) {
+ this.applicationName = applicationName;
+ }
+
+ public String getFilterEnable() {
+ return filterEnable;
+ }
+
+ public void setFilterEnable(String filterEnable) {
+ this.filterEnable = filterEnable;
+ }
+
+ public void setRpcAddress(String address){
+ this.rpcLogSenderConfig.setAddress(address);
+ }
+
+ public void setRpcConnTimeout(int connectionTimeout){
+ this.rpcLogSenderConfig.setConnectionTimeout(connectionTimeout);
+ }
+
+ public void setRpcSocketTimeout(int socketTimeout){
+ this.rpcLogSenderConfig.setSocketTimeout(socketTimeout);
+ }
+ public void setRpcSendRetryCnt(int sendRetryCnt){
+ this.rpcLogSenderConfig.setSendRetryCnt(sendRetryCnt);
+ }
+
+ public void setRpcServerRecoveryTimeInSec(int serverRecoveryTimeInSec){
+ this.rpcLogSenderConfig.setServerRecoveryTimeInSec(serverRecoveryTimeInSec);
+ }
+
+ public void setRpcMaxDelayTimeInSec(int maxDelayTimeInSec){
+ this.rpcLogSenderConfig.setMaxDelayTimeInSec(maxDelayTimeInSec);
+ }
+ // Authentication
+ public void setRpcAuthTokenCodeKey(String tokenCodeKey){
+ this.rpcLogSenderConfig.getAuthConfig().setTokenCodeKey(tokenCodeKey);
+ }
+
+ public void setRpcAuthTokenUserKey(String tokenUserKey){
+ this.rpcLogSenderConfig.getAuthConfig().setTokenUserKey(tokenUserKey);
+ }
+
+ public void setRpcAuthTokenUser(String tokenUser){
+ this.rpcLogSenderConfig.getAuthConfig().setTokenUser(tokenUser);
+ }
+
+ public void setRpcAuthTokenCode(String tokenCode){
+ this.rpcLogSenderConfig.getAuthConfig().setTokenCode(tokenCode);
+ }
+
+ // Cache configuration
+ public void setRpcCacheSize(int cacheSize){
+ this.rpcLogSenderConfig.getCacheConfig().setSize(cacheSize);
+ }
+
+ public void setRpcCacheMaxConsumeThread(int maxConsumeThread){
+ this.rpcLogSenderConfig.getCacheConfig().setMaxConsumeThread(maxConsumeThread);
+ }
+
+ // Buffer configuration
+ public void setRpcBufferSize(int bufferSize){
+ this.rpcLogSenderConfig.getBufferConfig().setSize(bufferSize);
+ }
+
+ public void setRpcBufferExpireTimeInSec(int expireTimeInSec){
+ this.rpcLogSenderConfig.getBufferConfig().setExpireTimeInSec(expireTimeInSec);
+ }
+
+ public void setDebugMode(boolean debugMode){
+ this.rpcLogSenderConfig.setDebugMode(debugMode);
+ }
+
+ public void setDiscard(boolean discard){
+ this.rpcLogSenderConfig.getCacheConfig().setDiscard(discard);
+ }
+
+ public void setDiscardWindow(int window){
+ this.rpcLogSenderConfig.getCacheConfig().setDiscardWindow(window);
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j1/StreamisLog4jAppenderConfig.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j1/StreamisLog4jAppenderConfig.java
new file mode 100644
index 000000000..f10bef451
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j1/StreamisLog4jAppenderConfig.java
@@ -0,0 +1,110 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j1;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.RpcLogSenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.StreamisLogAppenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilter;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilterAdapter;
+import org.apache.log4j.Priority;
+import org.apache.log4j.spi.Filter;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * Appender config for log4j1
+ */
+public class StreamisLog4jAppenderConfig extends StreamisLogAppenderConfig {
+
+    /**
+     * Filters in log4j1
+     */
+    private final List<Filter> filters = new ArrayList<>();
+    /**
+     * Log threshold
+     */
+    private final Priority threshold;
+
+    protected StreamisLog4jAppenderConfig(String applicationName, Priority threshold, List<Filter> filters,
+                                          RpcLogSenderConfig rpcLogSenderConfig, List<LogMessageFilter> messageFilters) {
+ super(applicationName, rpcLogSenderConfig, messageFilters);
+ this.threshold = threshold;
+ this.filters.addAll(filters);
+ }
+
+ public static class Builder extends StreamisLogAppenderConfig.Builder{
+
+ /**
+ * Filter rules
+ */
+        private final List<Filter> filters = new ArrayList<>();
+
+ /**
+ * Threshold
+ */
+ private Priority threshold;
+
+        public Builder(String applicationName, Priority threshold, Filter filter, RpcLogSenderConfig rpcLogSenderConfig) {
+ super(applicationName, rpcLogSenderConfig);
+ this.threshold = threshold;
+ if (Objects.nonNull(filter)) {
+ this.filters.add(filter);
+ }
+ }
+
+ public StreamisLog4jAppenderConfig.Builder setFilter(Filter filter){
+ this.filters.clear();
+ this.messageFilters.clear();
+ this.filters.add(filter);
+ if (filter instanceof LogMessageFilterAdapter){
+ this.messageFilters.add(((LogMessageFilterAdapter) filter).getLogMessageFilter());
+ }
+ return this;
+ }
+
+ public StreamisLog4jAppenderConfig.Builder withFilter(Filter filter){
+ filters.add(filter);
+ if (filter instanceof LogMessageFilterAdapter){
+ this.messageFilters.add(((LogMessageFilterAdapter) filter).getLogMessageFilter());
+ }
+ return this;
+ }
+
+ /**
+ * Set threshold
+ * @param threshold threshold
+ * @return builder
+ */
+ public StreamisLog4jAppenderConfig.Builder threshold(Priority threshold, boolean needMoreSpecific){
+ if (needMoreSpecific){
+ if (this.threshold == null || threshold.isGreaterOrEqual(this.threshold)){
+ this.threshold = threshold;
+ }
+ }else {
+ this.threshold = threshold;
+ }
+ return this;
+ }
+ public StreamisLog4jAppenderConfig build(){
+ return new StreamisLog4jAppenderConfig(applicationName, threshold, filters, rpcLogSenderConfig, messageFilters);
+ }
+ }
+
+    public List<Filter> getFilters() {
+ return filters;
+ }
+
+ public Priority getThreshold() {
+ return threshold;
+ }
+
+ @Override
+ public String toString() {
+ return "StreamisLog4jAppenderConfig{" +
+ "applicationName='" + applicationName + '\'' +
+ ", senderConfig=" + senderConfig +
+ ", filters=" + filters +
+ ", threshold=" + threshold +
+ '}';
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j1/filters/KeywordAllMatchFilter.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j1/filters/KeywordAllMatchFilter.java
new file mode 100644
index 000000000..1fe60b308
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/log4j1/filters/KeywordAllMatchFilter.java
@@ -0,0 +1,31 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j1.filters;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.KeywordMessageFilter;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilter;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.message.filters.LogMessageFilterAdapter;
+import org.apache.log4j.spi.Filter;
+import org.apache.log4j.spi.LoggingEvent;
+
+/**
+ * All match filter with keyword
+ */
+public class KeywordAllMatchFilter extends Filter implements LogMessageFilterAdapter {
+
+ /**
+ * Message filter
+ */
+ private final KeywordMessageFilter messageFilter;
+
+ public KeywordAllMatchFilter(String[] acceptKeywords, String[] excludeKeywords){
+ this.messageFilter = new KeywordMessageFilter(acceptKeywords, excludeKeywords);
+ }
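+    /**
+     * Accept all events at the log4j filter stage; keyword matching is applied
+     * later through the adapted message filter
+     */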
+ @Override
+ public int decide(LoggingEvent event) {
+ return Filter.ACCEPT;
+ }
+
+ @Override
+ public LogMessageFilter getLogMessageFilter() {
+ return this.messageFilter;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisLogAppenderTest.java b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisLogAppenderTest.java
new file mode 100644
index 000000000..0dcca02c9
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/StreamisLogAppenderTest.java
@@ -0,0 +1,27 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector;
+
+import org.apache.log4j.PropertyConfigurator;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class StreamisLogAppenderTest {
+ private static final Logger LOG = LoggerFactory.getLogger(StreamisLogAppenderTest.class);
+ @Test
+ public void appenderLog() throws InterruptedException {
+ PropertyConfigurator.configure(StreamisLogAppenderTest.class.getResource("/log4j.properties").getPath());
+ int total = 1000;
+ int tps = 100;
+ long timer = System.currentTimeMillis() + 1000;
+ for(int i = 0; i < total; i ++){
+ if (i > 0 && i % tps == 0){
+ long sleep = timer - System.currentTimeMillis();
+ if (sleep > 0){
+ Thread.sleep(sleep);
+ }
+ timer = System.currentTimeMillis() + 1000;
+ }
+ LOG.info("Stream Log appender test, sequence id: " + i);
+ }
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/test/resources/log4j.properties b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/test/resources/log4j.properties
new file mode 100644
index 000000000..8801938ab
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/streamis-job-log-collector1x/src/test/resources/log4j.properties
@@ -0,0 +1,44 @@
+#
+# Copyright 2021 WeBank
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+### set log levels ###
+
+log4j.rootCategory=INFO,stream
+
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.Threshold=INFO
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+#log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n
+log4j.appender.console.layout.ConversionPattern= %d{ISO8601} %-5p (%t) %p %c{1} - %m%n
+
+log4j.appender.stream=com.webank.wedatasphere.streamis.jobmanager.log.collector.StreamisRpcLogAppender
+log4j.appender.stream.appName=stream_application
+log4j.appender.stream.Threshold=INFO
+log4j.appender.stream.filterEnable=false
+log4j.appender.stream.layout=org.apache.log4j.PatternLayout
+log4j.appender.stream.layout.ConversionPattern= %d{ISO8601} %-5p (%t) [%F:%M(%L)] - %m%n
+log4j.appender.stream.rpcConnTimeout=3000
+log4j.appender.stream.rpcSocketTimeout=15000
+log4j.appender.stream.rpcSendRetryCnt=3
+log4j.appender.stream.rpcServerRecoveryTimeInSec=5
+log4j.appender.stream.rpcMaxDelayTimeInSec=60
+log4j.appender.stream.rpcAuthTokenCodeKey=
+log4j.appender.stream.rpcAuthTokenUserKey=
+log4j.appender.stream.rpcAuthTokenUser=
+log4j.appender.stream.rpcAuthTokenCode=
+log4j.appender.stream.rpcCacheSize=200
+log4j.appender.stream.rpcCacheMaxConsumeThread=1
+log4j.appender.stream.rpcBufferSize=50
+log4j.appender.stream.rpcBufferExpireTimeInSec=2
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/pom.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/pom.xml
new file mode 100644
index 000000000..1c13a8eb7
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/pom.xml
@@ -0,0 +1,75 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>streamis-job-log</artifactId>
+        <groupId>com.webank.wedatasphere.streamis</groupId>
+        <version>0.3.0</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>xspark-streamis-log-collector</artifactId>
+
+    <properties>
+        <maven.compiler.source>8</maven.compiler.source>
+        <maven.compiler.target>8</maven.compiler.target>
+        <log4j.version>1.2.17</log4j.version>
+        <slf4j.version>1.7.12</slf4j.version>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.webank.wedatasphere.streamis</groupId>
+            <artifactId>streamis-job-log-collector1x</artifactId>
+            <version>${streamis.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+            <version>${slf4j.version}</version>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+            <version>${slf4j.version}</version>
+            <scope>provided</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+            <version>${log4j.version}</version>
+            <scope>provided</scope>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <version>2.3</version>
+                <executions>
+                    <execution>
+                        <id>assemble</id>
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                        <phase>install</phase>
+                    </execution>
+                </executions>
+                <configuration>
+                    <descriptors>
+                        <descriptor>src/main/assembly/package.xml</descriptor>
+                    </descriptors>
+                    <appendAssemblyId>false</appendAssemblyId>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+</project>
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/assembly/package.xml b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/assembly/package.xml
new file mode 100644
index 000000000..8da27bf2c
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/assembly/package.xml
@@ -0,0 +1,19 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<assembly xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.3"
+          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+          xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.3 http://maven.apache.org/xsd/assembly-1.1.3.xsd">
+    <id>package</id>
+    <formats>
+        <format>jar</format>
+    </formats>
+    <includeBaseDirectory>false</includeBaseDirectory>
+    <dependencySets>
+        <dependencySet>
+            <outputDirectory>/</outputDirectory>
+            <useProjectArtifact>true</useProjectArtifact>
+            <scope>runtime</scope>
+        </dependencySet>
+    </dependencySets>
+</assembly>
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/spark/SparkStreamisConfigAutowired.java b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/spark/SparkStreamisConfigAutowired.java
new file mode 100644
index 000000000..7819ac460
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/collector/spark/SparkStreamisConfigAutowired.java
@@ -0,0 +1,107 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.collector.spark;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.config.StreamisLogAppenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j1.StreamisLog4jAppenderConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.collector.log4j1.filters.KeywordAllMatchFilter;
+import com.webank.wedatasphere.streamis.jobmanager.log.utils.StringUtils;
+import com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired;
+import org.apache.log4j.Level;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Optional;
+/**
+ * Autoconfigure the streamis config in Spark environment
+ */
+public class SparkStreamisConfigAutowired implements StreamisConfigAutowired {
+
+
+ private static final String DEBUG_MODE = "log.debug.mode";
+
+ private static final String DISCARD_SWITCH = "log.discard";
+
+ private static final String DISCARD_WINDOW = "log.discard.window";
+
+ private static final String APP_NAME_CONFIG = "app.name";
+
+ private static final String SERVER_ADDRESS_CONFIG = "streamis.url";
+
+ private static final String COLLECTOR_URI_CONFIG = "streamis.log.collector.uri";
+
+ private static final String PROJECT_NAME_CONFIG = "project.name";
+
+ private static final String DEFAULT_COLLECTOR_URI = "/api/rest_j/v1/streamis/streamJobManager/log/collect/events";
+
+ private static final String FILTER_ENABLE = "filter.enable";
+
+ private static final String FILTER_KEYWORD = "filter.keywords";
+
+ private static final String FILTER_KEYWORD_EXCLUDE = "filter.keywords.exclude";
+ @Override
+ public StreamisLogAppenderConfig logAppenderConfig(StreamisLogAppenderConfig.Builder builder) throws IllegalAccessException {
+ // Load the config from system properties
+        if (Boolean.parseBoolean(System.getProperty(DEBUG_MODE, "false"))){
+            builder.setDebugMode(true);
+        }
+        if (Boolean.parseBoolean(System.getProperty(DISCARD_SWITCH, "true"))){
+            builder.setDiscard(true);
+        }
+        try {
+            builder.setDiscardWindow(Integer.parseInt(System.getProperty(DISCARD_WINDOW, "2")));
+        } catch (NumberFormatException e){
+            // Ignore malformed discard window values
+        }
+ Optional.ofNullable(System.getProperty(APP_NAME_CONFIG)).ifPresent(appName -> {
+ String projectName = System.getProperty(PROJECT_NAME_CONFIG);
+ if (null != projectName && !projectName.trim().equals("")){
+ appName = projectName + "." + appName;
+ }
+ System.out.println("Spark env to streamis: application name =>" + appName);
+ builder.setAppName(appName);
+ });
+ String serverAddress = System.getProperty(SERVER_ADDRESS_CONFIG);
+ if (null != serverAddress && !serverAddress.trim().equals("")){
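+            // Normalize: strip any trailing slash from the address and ensure the collector URI starts with "/"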
+ if (serverAddress.endsWith("/")){
+ serverAddress = serverAddress.substring(0, serverAddress.length() - 1);
+ }
+ String collectorUri = System.getProperty(COLLECTOR_URI_CONFIG, DEFAULT_COLLECTOR_URI);
+ if (null != collectorUri && !collectorUri.trim().equals("")){
+ if (!collectorUri.startsWith("/")){
+ collectorUri = "/" + collectorUri;
+ }
+ serverAddress += collectorUri;
+ }
+ System.out.println("Spark env to streamis: server address =>" + serverAddress);
+ builder.setRpcAddress(serverAddress);
+ }
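+        // Resolve the log user: prefer the USER environment variable, then the user.name property, finally "hadoop"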
+ String user = System.getenv("USER");
+ if (null == user || user.trim().equals("")){
+ user = System.getProperty("user.name", "hadoop");
+ }
+ System.out.println("Spark env to streamis: log user =>" + user);
+ builder.setRpcAuthTokenUser(user);
+ // Set filter
+        boolean filterEnable = Boolean.parseBoolean(System.getProperty(FILTER_ENABLE, "true"));
+ if (filterEnable && builder instanceof StreamisLog4jAppenderConfig.Builder){
+ StreamisLog4jAppenderConfig.Builder log4jBuilder = ((StreamisLog4jAppenderConfig.Builder) builder);
+            String[] acceptKeywords = StringUtils.convertStrToArray(System.getProperty(FILTER_KEYWORD, "ERROR"), ",");
+            KeywordAllMatchFilter keywordAllMatchFilter = new KeywordAllMatchFilter(acceptKeywords,
+                    StringUtils.convertStrToArray(System.getProperty(FILTER_KEYWORD_EXCLUDE), ","));
+            if (null == acceptKeywords || acceptKeywords.length <= 0){
+                System.out.println("The accept keywords are empty, set the log threshold level >= " + Level.WARN);
+                log4jBuilder.threshold(Level.WARN, true);
+            }
+ log4jBuilder.setFilter(keywordAllMatchFilter);
+ }
+ return builder.build();
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired
new file mode 100644
index 000000000..dac2fcaed
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/job-log-collector/xspark-streamis-log-collector/src/main/resources/META-INF/services/com.webank.wedatasphere.streamis.jobmanager.plugin.StreamisConfigAutowired
@@ -0,0 +1 @@
+com.webank.wedatasphere.streamis.jobmanager.log.collector.spark.SparkStreamisConfigAutowired
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/pom.xml b/streamis-jobmanager/streamis-job-log/pom.xml
new file mode 100644
index 000000000..67d2098a4
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/pom.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>streamis-jobmanager</artifactId>
+        <groupId>com.webank.wedatasphere.streamis</groupId>
+        <version>0.3.0</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>streamis-job-log</artifactId>
+    <packaging>pom</packaging>
+    <modules>
+        <module>job-log-collector/streamis-job-log-collector-core</module>
+        <module>job-log-collector/streamis-job-log-collector</module>
+        <module>job-log-collector/streamis-job-log-collector1x</module>
+        <module>job-log-collector/flink-streamis-log-collector</module>
+        <module>job-log-collector/xspark-streamis-log-collector</module>
+        <module>streamis-job-log-server</module>
+        <module>streamis-job-log-common</module>
+    </modules>
+
+    <properties>
+        <maven.compiler.source>8</maven.compiler.source>
+        <maven.compiler.target>8</maven.compiler.target>
+    </properties>
+</project>
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-common/pom.xml b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/pom.xml
new file mode 100644
index 000000000..63948d607
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/pom.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>streamis-job-log</artifactId>
+        <groupId>com.webank.wedatasphere.streamis</groupId>
+        <version>0.3.0</version>
+        <relativePath>../pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>streamis-job-log-common</artifactId>
+
+    <properties>
+        <maven.compiler.source>8</maven.compiler.source>
+        <maven.compiler.target>8</maven.compiler.target>
+    </properties>
+</project>
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/LogElement.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/LogElement.java
new file mode 100644
index 000000000..da3a7054b
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/LogElement.java
@@ -0,0 +1,34 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.entities;
+
+
+/**
+ * Element defined of log
+ */
+public interface LogElement {
+
+ /**
+ * Sequence id
+ * @return seq id
+ */
+ int getSequenceId();
+
+ /**
+ * Log time
+ * @return log time
+ */
+ long getLogTimeStamp();
+
+ /**
+ * Get content
+ * @return content array
+ */
+ String[] getContents();
+
+    /**
+     * The importance of the log
+     * 0: useless, 1: normal, 2: important
+     * @return mark value
+     */
+    int mark();
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/StreamisLogEvent.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/StreamisLogEvent.java
new file mode 100644
index 000000000..6f8645f77
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/StreamisLogEvent.java
@@ -0,0 +1,84 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.entities;
+
+
+import com.webank.wedatasphere.streamis.jobmanager.log.json.JsonTool;
+
+import java.io.Serializable;
+import java.util.Objects;
+
+/**
+ * Log event for streamis
+ */
+public class StreamisLogEvent implements LogElement, Serializable {
+
+ /**
+ * Log time
+ */
+ private long logTimeInMills;
+
+ /**
+ * Log content
+ */
+ private String content;
+
+ /**
+ * Mark
+ */
+ private int mark;
+
+ public StreamisLogEvent(){
+
+ }
+ public StreamisLogEvent(String content, long logTimeInMills){
+ this.content = content;
+ this.logTimeInMills = logTimeInMills;
+ }
+ @Override
+ public int getSequenceId() {
+ return 0;
+ }
+
+ @Override
+ public long getLogTimeStamp() {
+ return this.logTimeInMills;
+ }
+
+ @Override
+ public String[] getContents() {
+ return new String[]{content};
+ }
+
+ public String getContent() {
+ return content;
+ }
+
+ @Override
+ public int mark() {
+ return this.mark;
+ }
+
+ public void setLogTimeStamp(long logTimeInMills) {
+ this.logTimeInMills = logTimeInMills;
+ }
+
+ public void setContent(String content) {
+ this.content = content;
+ }
+
+ public void setMark(int mark) {
+ this.mark = mark;
+ }
+
+ public void setSequenceId(int sequenceId){
+ // Ignore
+ }
+
+ public String toJson(){
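+        // Build the JSON manually; JsonTool escapes special characters in the content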
+ return "{" +
+ "\"logTimeStamp\":" + logTimeInMills +
+ ",\"content\":" + (Objects.isNull(content)? null : "\"" + JsonTool.escapeStrValue(content) + "\"") +
+ ",\"sequenceId\":0"
+ + "}";
+
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/StreamisLogEvents.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/StreamisLogEvents.java
new file mode 100644
index 000000000..f2843c8af
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/entities/StreamisLogEvents.java
@@ -0,0 +1,112 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.entities;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.json.JsonTool;
+
+import java.io.Serializable;
+import java.util.Objects;
+
+public class StreamisLogEvents implements LogElement, Serializable {
+
+ /**
+ * Application name
+ */
+ private String appName;
+ /**
+ * Log time
+ */
+ private long logTimeInMills;
+
+ private StreamisLogEvent[] events;
+ public StreamisLogEvents(){
+
+ }
+ public StreamisLogEvents(String applicationName, StreamisLogEvent[] events){
+ this.appName = applicationName;
+ this.events = events;
+        long maxTime = -1;
+        // Guard against an empty array; a null tail means the array may be partially filled
+        StreamisLogEvent lastEvent = (null == events || events.length == 0) ? null : events[events.length - 1];
+        if (null == lastEvent) {
+            if (null != events) {
+                for (StreamisLogEvent event : events) {
+                    if (null != event && event.getLogTimeStamp() > maxTime) {
+                        maxTime = event.getLogTimeStamp();
+                    }
+                }
+            }
+            this.logTimeInMills = maxTime;
+        } else {
+            this.logTimeInMills = lastEvent.getLogTimeStamp();
+        }
+
+ }
+
+ @Override
+ public int getSequenceId() {
+ return 0;
+ }
+
+ @Override
+ public long getLogTimeStamp() {
+ return this.logTimeInMills;
+ }
+
+
+ @Override
+ public String[] getContents() {
+ String[] contents = new String[events.length];
+ for(int i = 0 ; i < contents.length; i++){
+ contents[i] = events[i].getContent();
+ }
+ return contents;
+ }
+
+ @Override
+ public int mark() {
+ return 1;
+ }
+
+ public String getAppName() {
+ return appName;
+ }
+
+ public StreamisLogEvent[] getEvents() {
+ return events;
+ }
+
+ public void setAppName(String appName) {
+ this.appName = appName;
+ }
+
+ public void setLogTimeStamp(long logTimeInMills) {
+ this.logTimeInMills = logTimeInMills;
+ }
+
+ public void setEvents(StreamisLogEvent[] events) {
+ this.events = events;
+ }
+
+ public void setSequenceId(int sequenceId){
+ // Ignore
+ }
+
+ public String toJson(){
+ return "{" +
+ "\"logTimeStamp\":" + logTimeInMills +
+ ",\"appName\":" + (Objects.isNull(appName)? null : "\"" + JsonTool.escapeStrValue(appName) + "\"") +
+ ",\"events\":[" +
+ (Objects.isNull(events) || events.length <=0 ? "" : joinEvents(events, ",") ) + "]" +
+ ",\"sequenceId\":0"
+ + "}";
+ }
+
+ private String joinEvents(StreamisLogEvent[] events, String separator){
+ StringBuilder builder = new StringBuilder();
+ for(int i = 0; i < events.length; i ++){
+ builder.append(events[i].toJson());
+ if (i < events.length - 1){
+ builder.append(separator);
+ }
+ }
+ return builder.toString();
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/json/JsonTool.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/json/JsonTool.java
new file mode 100644
index 000000000..9d17492dd
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-common/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/json/JsonTool.java
@@ -0,0 +1,64 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.json;
+
+import java.util.Locale;
+
+public class JsonTool {
+ static final char[] HEX_DIGITS = new char[] {'0','1','2','3','4','5','6','7','8','9','A','B','C','D','E','F'};
+
+ private JsonTool(){}
+ /**
+ * Avoid the special char
+ * @param input input string
+ * @return output string
+ */
+ public static String escapeStrValue(String input){
+ char[] chars = input.toCharArray();
+ StringBuilder sb = new StringBuilder();
+ for (char c : chars) {
+ switch (c) {
+ case '\"':
+ sb.append("\\\"");
+ break;
+ case '\\':
+ sb.append("\\\\");
+ break;
+ case '/':
+ sb.append("\\/");
+ break;
+ case '\b':
+ sb.append("\\b");
+ break;
+ case '\f':
+ sb.append("\\f");
+ break;
+ case '\n':
+ sb.append("\\n");
+ break;
+ case '\r':
+ sb.append("\\r");
+ break;
+ case '\t':
+ sb.append("\\t");
+ break;
+ default:
+ sb.append((c < 32) ? escapeUnicode(c) : c);
+ }
+ }
+ return sb.toString();
+ }
+
+ /**
+ * Escape unicode
+ * @param code char code
+ * @return escaped string
+ */
+ private static String escapeUnicode(int code){
+ if (code > 0xffff){
+ return "\\u" + Integer.toHexString(code).toUpperCase(Locale.ENGLISH);
+ } else {
+ return "\\u" + HEX_DIGITS[(code >> 12) & 15]
+ + HEX_DIGITS[(code >> 8) & 15] + HEX_DIGITS[(code >> 4) & 15] + HEX_DIGITS[code & 15];
+ }
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/pom.xml b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/pom.xml
new file mode 100644
index 000000000..0c485c5a8
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/pom.xml
@@ -0,0 +1,37 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>streamis-job-log</artifactId>
+        <groupId>com.webank.wedatasphere.streamis</groupId>
+        <version>0.3.0</version>
+        <relativePath>../pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>streamis-job-log-server</artifactId>
+
+    <properties>
+        <maven.compiler.source>8</maven.compiler.source>
+        <maven.compiler.target>8</maven.compiler.target>
+    </properties>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.webank.wedatasphere.streamis</groupId>
+            <artifactId>streamis-job-log-common</artifactId>
+            <version>0.3.0</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.linkis</groupId>
+            <artifactId>linkis-module</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>${junit.version}</version>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+</project>
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/StreamisJobLogAutoConfiguration.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/StreamisJobLogAutoConfiguration.java
new file mode 100644
index 000000000..f3f32e363
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/StreamisJobLogAutoConfiguration.java
@@ -0,0 +1,24 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.JobLogStorage;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.StreamisJobLogStorage;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.StorageThresholdDriftPolicy;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer.RoundRobinLoadBalancer;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer.SimpleLoadBalancer;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.Configuration;
+
+@Configuration
+public class StreamisJobLogAutoConfiguration {
+
+ @Bean(initMethod = "init", destroyMethod = "destroy")
+ @ConditionalOnMissingBean(JobLogStorage.class)
+ public JobLogStorage streamisJobLogStorage(){
+ StreamisJobLogStorage jobLogStorage = new StreamisJobLogStorage();
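+        // Register the built-in load balancers and the threshold-based bucket drift policy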
+ jobLogStorage.addLoadBalancer(new RoundRobinLoadBalancer());
+ jobLogStorage.addLoadBalancer(new SimpleLoadBalancer());
+ jobLogStorage.setBucketDriftPolicy(new StorageThresholdDriftPolicy());
+ return jobLogStorage;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/config/StreamJobLogConfig.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/config/StreamJobLogConfig.java
new file mode 100644
index 000000000..f4a2fb069
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/config/StreamJobLogConfig.java
@@ -0,0 +1,67 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.config;
+
+import org.apache.linkis.common.conf.CommonVars;
+import org.apache.linkis.common.conf.TimeType;
+
+/**
+ * Store the configuration defined for job log
+ */
+public class StreamJobLogConfig {
+ private StreamJobLogConfig(){}
+
+    /**
+     * Set the log restful api as no-auth
+     */
+    public static final CommonVars<Boolean> NO_AUTH_REST = CommonVars.apply("wds.stream.job.log.restful.no-auth", false);
+
+    /**
+     * The threshold of log storage
+     */
+    public static final CommonVars<Double> STORAGE_THRESHOLD = CommonVars.apply("wds.stream.job.log.storage.threshold", 0.9);
+
+    /**
+     * Max weight of storage context
+     */
+    public static final CommonVars<Integer> STORAGE_CONTEXT_MAX_WEIGHT = CommonVars.apply("wds.stream.job.log.storage.context.max-weight", 5);
+
+    /**
+     * Paths of storage context
+     */
+    public static final CommonVars<String> STORAGE_CONTEXT_PATHS = CommonVars.apply("wds.stream.job.log.storage.context.paths", "/data/stream/log");
+
+    /**
+     * Bucket monitor name
+     */
+    public static final CommonVars<String> BUCKET_MONITOR_NAME = CommonVars.apply("wds.stream.job.log.storage.bucket.monitor.name", "Log-Storage-Bucket-Monitor");
+
+    /**
+     * Bucket monitor interval
+     */
+    public static final CommonVars<TimeType> BUCKET_MONITOR_INTERVAL = CommonVars.apply("wds.stream.job.log.storage.bucket.monitor.interval", new TimeType("2m"));
+
+    /**
+     * Bucket max idle time
+     */
+    public static final CommonVars<TimeType> BUCKET_MAX_IDLE_TIME = CommonVars.apply("wds.stream.job.log.storage.bucket.max-idle-time", new TimeType("12h"));
+
+    /**
+     * Bucket root path
+     */
+    public static final CommonVars<String> BUCKET_ROOT_PATH = CommonVars.apply("wds.stream.job.log.storage.bucket.root-path", "/data/stream/log");
+    /**
+     * Max active part size in bucket
+     */
+    public static final CommonVars<Long> BUCKET_MAX_ACTIVE_PART_SIZE = CommonVars.apply("wds.stream.job.log.storage.bucket.max-active-part-size", 100L);
+
+    /**
+     * Compression of part in bucket
+     */
+    public static final CommonVars<String> BUCKET_PART_COMPRESS = CommonVars.apply("wds.stream.job.log.storage.bucket.part-compress", "gz");
+
+    /**
+     * Bucket layout
+     */
+    public static final CommonVars<String> BUCKET_LAYOUT = CommonVars.apply("wds.stream.job.log.storage.bucket.layout", "%msg");
+
+    /**
+     * Days to hold the parts in bucket
+     */
+    public static final CommonVars<Integer> BUCKET_PART_HOLD_DAY = CommonVars.apply("wds.stream.job.log.storage.bucket.part-hold-day", 30);
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/entities/StreamisLogEvents.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/entities/StreamisLogEvents.java
new file mode 100644
index 000000000..8676c5778
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/entities/StreamisLogEvents.java
@@ -0,0 +1,12 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.entities;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+
+public class StreamisLogEvents extends com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvents {
+
+ @Override
+ @JsonIgnore
+ public String[] getContents() {
+ return super.getContents();
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/exception/StreamJobLogException.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/exception/StreamJobLogException.java
new file mode 100644
index 000000000..56edc2dd3
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/exception/StreamJobLogException.java
@@ -0,0 +1,29 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.exception;
+
+import org.apache.linkis.common.exception.ErrorException;
+import org.apache.linkis.common.exception.ExceptionLevel;
+import org.apache.linkis.common.exception.LinkisRuntimeException;
+
+/**
+ * Stream job log exception
+ */
+public class StreamJobLogException extends ErrorException {
+ public StreamJobLogException(int errCode, String desc) {
+ super(errCode, desc);
+ }
+    public StreamJobLogException(int errCode, String desc, Throwable t){
+        super(errCode, desc);
+        this.initCause(t);
+    }
+ public static class Runtime extends LinkisRuntimeException{
+
+ public Runtime(int errCode, String desc) {
+ super(errCode, desc);
+ }
+
+ @Override
+ public ExceptionLevel getLevel() {
+ return ExceptionLevel.ERROR;
+ }
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/restful/JobLogRestfulApi.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/restful/JobLogRestfulApi.java
new file mode 100644
index 000000000..f60d5b12f
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/restful/JobLogRestfulApi.java
@@ -0,0 +1,64 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.restful;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.config.StreamJobLogConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.entities.StreamisLogEvents;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.exception.StreamJobLogException;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.service.StreamisJobLogService;
+import org.apache.commons.lang.StringUtils;
+import org.apache.linkis.server.Message;
+import org.apache.linkis.server.security.SecurityFilter;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.web.bind.annotation.RequestBody;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestMethod;
+import org.springframework.web.bind.annotation.RestController;
+
+import javax.annotation.Resource;
+import javax.servlet.http.HttpServletRequest;
+
+@RestController
+@RequestMapping(path = "/streamis/streamJobManager/log")
+public class JobLogRestfulApi {
+
+ private static final Logger LOG = LoggerFactory.getLogger(JobLogRestfulApi.class);
+ @Resource
+ private StreamisJobLogService streamisJobLogService;
+
+ @RequestMapping(value = "/collect/events", method = RequestMethod.POST)
+ public Message collectEvents(@RequestBody StreamisLogEvents events, HttpServletRequest request){
+ Message result;
+ try{
+ if (StringUtils.isBlank(events.getAppName())){
+                return Message.ok("Ignore the stream log events without an application name");
+ }
+ String userName;
+ if (StreamJobLogConfig.NO_AUTH_REST.getValue()){
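+                // In no-auth mode, trust the Token-User header first, then the session user, finally fall back to "hadoop"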
+ userName = request.getHeader("Token-User");
+ if (StringUtils.isBlank(userName)){
+ try {
+ userName = SecurityFilter.getLoginUsername(request);
+ }catch(Exception e){
+ LOG.error("获取登录用户失败. {}", e.getMessage(), e);
+ }
+ if (StringUtils.isBlank(userName)){
+ LOG.error("获取登录用户失败, 使用默认用户: hadoop");
+ userName = "hadoop";
+ }
+ }
+ }else {
+ userName = SecurityFilter.getLoginUsername(request);
+ if (StringUtils.isBlank(userName)) {
+                    throw new StreamJobLogException(-1, "The request should carry a token user");
+ }
+ }
+ this.streamisJobLogService.store(userName, events);
+ result = Message.ok();
+        }catch (Exception e){
+            String message = "Failed to collect stream log events, message: " + e.getMessage();
+            LOG.error(message, e);
+            result = Message.error(message);
+ }
+ return result;
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/service/DefaultStreamisJobLogService.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/service/DefaultStreamisJobLogService.java
new file mode 100644
index 000000000..8fea4dab6
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/service/DefaultStreamisJobLogService.java
@@ -0,0 +1,35 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.service;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvents;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.JobLogStorage;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucket;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketConfig;
+import org.springframework.stereotype.Service;
+
+import javax.annotation.PostConstruct;
+import javax.annotation.Resource;
+
+/**
+ * Default implementation of {@link StreamisJobLogService}
+ */
+@Service
+public class DefaultStreamisJobLogService implements StreamisJobLogService{
+
+ @Resource
+ private JobLogStorage jobLogStorage;
+
+ private JobLogBucketConfig jobLogBucketConfig;
+
+ @PostConstruct
+ public void init(){
+ jobLogBucketConfig = new JobLogBucketConfig();
+ }
+ @Override
+ public void store(String user, StreamisLogEvents events) {
+ JobLogBucket jobLogBucket = jobLogStorage.getOrCreateBucket(user, events.getAppName(), jobLogBucketConfig);
+        // If no log bucket can be obtained, drop the events
+ if (null != jobLogBucket){
+ jobLogBucket.getBucketStorageWriter().write(events);
+ }
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/service/StreamisJobLogService.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/service/StreamisJobLogService.java
new file mode 100644
index 000000000..e8f8bfe4e
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/service/StreamisJobLogService.java
@@ -0,0 +1,16 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.service;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.StreamisLogEvents;
+
+/**
+ * Job log service
+ */
+public interface StreamisJobLogService {
+
+ /**
+ * Store log events
+     * @param user owning user
+ * @param events events
+ */
+ void store(String user, StreamisLogEvents events);
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/JobLogStorage.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/JobLogStorage.java
new file mode 100644
index 000000000..7dad9924d
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/JobLogStorage.java
@@ -0,0 +1,50 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.exception.StreamJobLogException;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucket;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketDriftPolicy;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContextListener;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer.JobLogStorageLoadBalancer;
+
+/**
+ * Storage of job log
+ */
+public interface JobLogStorage {
+
+ /**
+     * Get or create a bucket
+     * @param userName owning user
+     * @param appName application name
+     * @param bucketConfig bucket config
+     * @return bucket
+ */
+ JobLogBucket getOrCreateBucket(String userName, String appName, JobLogBucketConfig bucketConfig);
+
+ /**
+ * Set bucket drift policy
+ * @param bucketDriftPolicy bucket drift policy
+ */
+ void setBucketDriftPolicy(JobLogBucketDriftPolicy bucketDriftPolicy);
+
+ /**
+ * Add context listener
+ * @param listener listener
+ */
+ void addContextListener(JobLogStorageContextListener listener);
+
+ /**
+ * Add load balancer
+ * @param loadBalancer load balancer
+ */
+ void addLoadBalancer(JobLogStorageLoadBalancer loadBalancer);
+ /**
+ * Init method
+ */
+ void init() throws StreamJobLogException;
+
+ /**
+ * Destroy method
+ */
+ void destroy();
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/StreamisJobLogStorage.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/StreamisJobLogStorage.java
new file mode 100644
index 000000000..9ac620d5d
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/StreamisJobLogStorage.java
@@ -0,0 +1,328 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.config.StreamJobLogConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.exception.StreamJobLogException;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucket;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketDriftPolicy;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketState;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.*;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer.JobLogStorageLoadBalancer;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.utils.MemUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.linkis.common.utils.Utils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import javax.annotation.PostConstruct;
+import javax.annotation.PreDestroy;
+import java.io.IOException;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.text.SimpleDateFormat;
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.Future;
+import java.util.concurrent.TimeUnit;
+
+import static com.webank.wedatasphere.streamis.jobmanager.log.server.config.StreamJobLogConfig.BUCKET_MONITOR_INTERVAL;
+
+/**
+ * Job log storage
+ */
+public class StreamisJobLogStorage implements JobLogStorage{
+
+ private static final Logger logger = LoggerFactory.getLogger(StreamisJobLogStorage.class);
+
+ /**
+ * Storage context
+ */
+    private final List<JobLogStorageContext> storageContexts = new CopyOnWriteArrayList<>();
+
+ /**
+ * Drift policy
+ */
+ private JobLogBucketDriftPolicy bucketDriftPolicy;
+ /**
+ * Buckets
+ */
+    private final Map<String, JobLogBucket> buckets = new ConcurrentHashMap<>();
+
+ /**
+ * Context listeners
+ */
+    private final List<JobLogStorageContextListener> contextListeners = new ArrayList<>();
+
+ /**
+ * Load balancer
+ */
+    private final List<JobLogStorageLoadBalancer> loadBalancers = new ArrayList<>();
+
+ /**
+ * Constructor cache
+ */
+    private final Map<String, Constructor<?>> bucketConstructors = new ConcurrentHashMap<>();
+
+ /**
+ * To monitor the status of buckets
+ */
+    private Future<?> monitorThread;
+
+ @Override
+ public JobLogBucket getOrCreateBucket(String userName, String appName, JobLogBucketConfig bucketConfig) {
+ String bucketName = toBucketName(userName, appName);
+ return buckets.computeIfAbsent(bucketName, name -> {
+            // First, choose a storage context
+ JobLogStorageContext context = chooseStorageContext(bucketName, bucketConfig);
+ if (null != context){
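+                // The bucket class is expected to declare a public constructor of the form
+                // (String bucketName, JobLogStorageContext context, JobLogBucketConfig config),
+                // e.g. Log4j2JobLogBucket; the matched constructor is cached per class name.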
+                Class<? extends JobLogBucket> bucketClass = bucketConfig.getBucketClass();
+                if (Objects.nonNull(bucketClass)) {
+                    Constructor<?> constructor = bucketConstructors.computeIfAbsent(bucketClass.getName(), className -> {
+                        Constructor<?>[] constructors = bucketClass.getConstructors();
+                        Constructor<?> matchConstructor = null;
+                        for (Constructor<?> constructor1 : constructors) {
+                            Class<?>[] inputParams = constructor1.getParameterTypes();
+ if (inputParams.length >= 3 && inputParams[0].equals(String.class)
+ && inputParams[1].equals(JobLogStorageContext.class) && inputParams[2].equals(JobLogBucketConfig.class)) {
+ matchConstructor = constructor1;
+ break;
+ }
+ }
+ return matchConstructor;
+ });
+ if (Objects.nonNull(constructor)) {
+ try {
+ return (JobLogBucket) constructor.newInstance(bucketName, context, bucketConfig);
+ } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
+ logger.warn("Cannot create storage log bucket from [{}]", bucketClass.getName(), e);
+ }
+ }
+ }
+ }
+ return null;
+ });
+ }
+
+ @Override
+ public void setBucketDriftPolicy(JobLogBucketDriftPolicy bucketDriftPolicy) {
+ this.bucketDriftPolicy = bucketDriftPolicy;
+ }
+
+ @Override
+ public void addContextListener(JobLogStorageContextListener listener) {
+ this.contextListeners.add(listener);
+ }
+
+ @Override
+ public void addLoadBalancer(JobLogStorageLoadBalancer loadBalancer) {
+ this.loadBalancers.add(loadBalancer);
+ if (loadBalancer instanceof JobLogStorageContextListener){
+ addContextListener((JobLogStorageContextListener) loadBalancer);
+ }
+ }
+
+ @Override
+ @PostConstruct
+ public synchronized void init() throws StreamJobLogException {
+ initStorageContexts(StringUtils.split(StreamJobLogConfig.STORAGE_CONTEXT_PATHS.getValue(), ","));
+ onContextEvent(new ContextLaunchEvent(new ArrayList<>(this.storageContexts)));
+ // Init load balancer
+ initLoadBalancers();
+ if (Objects.isNull(monitorThread)){
+ monitorThread = Utils.defaultScheduler().scheduleAtFixedRate(() -> {
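+                // The monitor periodically refreshes context weights, logs each bucket's state,
+                // closes buckets idle longer than BUCKET_MAX_IDLE_TIME and applies the drift policy.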
+ String threadName = Thread.currentThread().getName();
+ try {
+ Thread.currentThread().setName(StreamJobLogConfig.BUCKET_MONITOR_NAME.getValue());
+ long maxIdleTime = StreamJobLogConfig.BUCKET_MAX_IDLE_TIME.getValue().toLong();
+ SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
+ // Update the storage context
+ JobLogStorageContext[] contexts = this.storageContexts.toArray(new JobLogStorageContext[0]);
+ try {
+ updateContextWeight(contexts);
+ onContextEvent(new ContextRefreshAllEvent());
+ } catch (IOException e) {
+ logger.warn("Unable to calculate weight array of storage context list", e);
+ }
+ if (buckets.size() > 0) {
+ StringBuilder builder = new StringBuilder("Buckets(").append(buckets.size()).append(") in LogStorage: [\n");
+ buckets.forEach((bucketName, bucket) -> {
+ JobLogBucketState bucketState = bucket.getBucketState();
+ builder.append("bucket: [ name: ")
+ .append(bucketName)
+ .append(", path: ").append(bucketState.getBucketPath())
+ .append(", parts: ").append(bucketState.getBucketParts())
+ .append(", write-rate: ").append(bucketState.getBucketWriteRate()).append("/s")
+ .append(", last-write-time: ").append(dateFormat.format(bucketState.getBucketWriteTime()))
+ .append(" ]\n");
+ boolean closeBucket = false;
+ if (bucketState.getBucketWriteTime() + maxIdleTime <= System.currentTimeMillis()) {
+ logger.info("Close the idle bucket: [ name: {}, last-write-time: {} ]",
+ bucketName, dateFormat.format(bucketState.getBucketWriteTime()));
+ closeBucket = true;
+ }
+ if (Objects.nonNull(bucketDriftPolicy) && bucketDriftPolicy.onPolicy(bucket, contexts)){
+ logger.info("Drift the bucket: [ name: {}, last-write-time: {} ]", bucketName,
+ dateFormat.format(bucketState.getBucketWriteTime()));
+ closeBucket = true;
+ }
+ if (closeBucket) {
+ // Delete the bucket
+                            // First remove the bucket from the map, then close it
+ buckets.remove(bucketName);
+ bucket.close();
+ }
+ });
+ logger.info(builder.toString());
+ }
+ } catch (Throwable e){
+                    logger.warn("Some exception happened in monitor thread", e);
+                    // Ignore and keep the monitor thread running
+ } finally {
+ Thread.currentThread().setName(threadName);
+ }
+
+ },BUCKET_MONITOR_INTERVAL.getValue().toLong(), BUCKET_MONITOR_INTERVAL.getValue().toLong(), TimeUnit.MILLISECONDS);
+ }
+ }
+
+
+ @Override
+ @PreDestroy
+ public void destroy() {
+        // First close all the buckets
+ buckets.forEach((bucketName, bucket) -> bucket.close());
+ if (null != monitorThread){
+ monitorThread.cancel(true);
+ }
+ }
+
+ /**
+ * Choose storage context
+ * @param bucketName bucket name
+ * @param jobLogBucketConfig bucket config
+ * @return storage context
+ */
+ private JobLogStorageContext chooseStorageContext(String bucketName, JobLogBucketConfig jobLogBucketConfig){
+ JobLogStorageContext context;
+ for(JobLogStorageLoadBalancer balancer : loadBalancers){
+ context = balancer.chooseContext(bucketName, jobLogBucketConfig);
+ if (null != context){
+ return context;
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Init load balancers
+ */
+ private void initLoadBalancers(){
+ for(JobLogStorageLoadBalancer loadBalancer : this.loadBalancers){
+ loadBalancer.init();
+ }
+ // Sort the load balancer
+ this.loadBalancers.sort(Comparator.comparingInt(JobLogStorageLoadBalancer::priority).reversed());
+ }
+ /**
+ * Init the storage context
+ * @param storagePaths storage paths
+ */
+ private void initStorageContexts(String[] storagePaths) throws StreamJobLogException {
+ logger.info("Init the storage context: [" + StringUtils.join(storagePaths, ",") + "]");
+ for(String storagePath : storagePaths){
+ if (StringUtils.isNotBlank(storagePath)) {
+ this.storageContexts.add(new JobLogStorageContext(storagePath, 1.0));
+ }
+ }
+ if (!this.storageContexts.isEmpty()) {
+ int size = this.storageContexts.size();
+ try {
+ updateContextWeight(storageContexts.toArray(new JobLogStorageContext[size]));
+ } catch (IOException e) {
+ throw new StreamJobLogException(-1, "Unable to calculate weight array of storage context list", e);
+ }
+ }
+ }
+
+ private void updateContextWeight(JobLogStorageContext[] contexts) throws IOException {
+ double[] weights = calculateContextWeight(contexts);
+ StringBuilder builder = new StringBuilder("Update storage context weights:[\n");
+ for(int i = 0 ; i < weights.length; i ++){
+ JobLogStorageContext context = contexts[i];
+ builder.append(context.getStorePath()).append(" => ").append(weights[i]);
+ if (i != weights.length - 1){
+ builder.append(", ");
+ }
+ context.setStoreWeight(weights[i]);
+ }
+ builder.append("\n]");
+ logger.info(builder.toString());
+ }
+ /**
+ * Calculate the base weight of storage context
+     * @param contexts context array
+     * @return weight array
+     */
+ private double[] calculateContextWeight(JobLogStorageContext[] contexts) throws IOException {
+ double[] weights = new double[contexts.length];
+ if (contexts.length > 0) {
+ int maxNormalizeWt = StreamJobLogConfig.STORAGE_CONTEXT_MAX_WEIGHT.getValue();
+ double storageThreshold = StreamJobLogConfig.STORAGE_THRESHOLD.getValue();
+ if (maxNormalizeWt < 1){
+ maxNormalizeWt = 1;
+ }
+ double maxWeight = Double.MIN_VALUE;
+ double minWeight = Double.MAX_VALUE;
+ int i = 0;
+ for (; i < weights.length; i++) {
+                JobLogStorageContext context = contexts[i];
+ long usableSpace = context.getUsableSpace();
+ long totalSpace = context.getTotalSpace();
+ double usage = (double)(totalSpace - usableSpace) / (double)totalSpace;
+ double weight = 0d;
+ if (usage >= storageThreshold){
+ logger.warn("The usage of storage context:[{}] reach the threshold: {} > {}, set the weight of it to 0",
+ context.getStorePath(), usage, storageThreshold);
+ } else {
+ long freeSpaceInGB = MemUtils.convertToGB(usableSpace, "B");
+ if (freeSpaceInGB <= 0) {
+ freeSpaceInGB = 1;
+ }
+ weight = context.getScore() * (double) freeSpaceInGB;
+ }
+ weights[i] = weight;
+ if (weight > maxWeight){
+ maxWeight = weight;
+ }
+ if (weight < minWeight){
+ minWeight = weight;
+ }
+ }
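+            // Min-max normalize the raw weights into [1, maxNormalizeWt]:
+            //   norm(w) = (maxNormalizeWt - 1) * (w - min) / (max - min) + 1
+            // e.g. with maxNormalizeWt = 10, min = 50, max = 150, a raw weight of 100
+            // normalizes to 9 * 50 / 100 + 1 = 5.5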
+ double sub = maxWeight - minWeight;
+ i = i - 1;
+ for (; i >= 0; i--){
+                weights[i] = (sub > 0 ? (maxNormalizeWt - 1) * (weights[i] - minWeight) / sub : 0) + 1;
+ }
+ }
+ return weights;
+ }
+
+ /**
+ * Produce context event
+ * @param event event
+ */
+ private void onContextEvent(JobLogStorageContextListener.ContextEvent event){
+ for(JobLogStorageContextListener listener : contextListeners){
+ listener.onContextEvent(event);
+ }
+ }
+ /**
+ * Bucket name
+ * @param userName username
+ * @param appName app name
+ * @return bucket name
+ */
+ private String toBucketName(String userName, String appName){
+ return userName + "." + appName;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucket.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucket.java
new file mode 100644
index 000000000..463edab76
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucket.java
@@ -0,0 +1,36 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContext;
+
+/**
+ * Job log bucket for streamis
+ */
+public interface JobLogBucket {
+
+ /**
+ * Bucket state
+ * @return state
+ */
+ JobLogBucketState getBucketState();
+
+ /**
+ * Storage writer
+ * @return storage writer
+ */
+ JobLogStorageWriter getBucketStorageWriter();
+
+ /**
+ * Get storage context
+ * @return context
+ */
+ JobLogStorageContext getStorageContext();
+ /**
+ * Bucket name
+ * @return bucket name
+ */
+ String getBucketName();
+ /**
+ * Close the bucket
+ */
+ void close();
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketConfig.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketConfig.java
new file mode 100644
index 000000000..f101c5649
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketConfig.java
@@ -0,0 +1,112 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.config.StreamJobLogConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.exception.StreamJobLogException;
+import org.apache.linkis.common.conf.CommonVars;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Configuration for job log bucket
+ */
+public class JobLogBucketConfig {
+
+ @SuppressWarnings("unchecked")
+ public JobLogBucketConfig(){
+ try {
+            Class<?> defaultBucketClass = Class.forName(Define.JOB_LOG_BUCKET_CLASS.getValue());
+ if (JobLogBucket.class.isAssignableFrom(defaultBucketClass)){
+                this.bucketClass = (Class<? extends JobLogBucket>) defaultBucketClass;
+ }
+ } catch (ClassNotFoundException e) {
+ throw new StreamJobLogException.Runtime(-1, "Cannot find the bucket class, message: " + e.getMessage());
+ }
+ }
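+
+    // Note: the bucket implementation can be swapped via the Linkis property
+    // "wds.streamis.job.log.bucket.class" (see Define below); the class must implement
+    // JobLogBucket and expose a (String, JobLogStorageContext, JobLogBucketConfig) constructor.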
+
+ /**
+ * Bucket class
+ */
+    private Class<? extends JobLogBucket> bucketClass;
+
+ /**
+ * Attribute
+ */
+    protected Map<String, Object> attributes = new HashMap<>();
+
+ /**
+ * Max size of bucket active part (MB)
+ */
+ private long maxBucketActivePartSize = StreamJobLogConfig.BUCKET_MAX_ACTIVE_PART_SIZE.getValue();
+
+ /**
+ * The compress format used for bucket parts
+ */
+ private String bucketPartCompress = StreamJobLogConfig.BUCKET_PART_COMPRESS.getValue();
+
+ /**
+ * Max hold time in days for bucket part
+ */
+ private int bucketPartHoldTimeInDay = StreamJobLogConfig.BUCKET_PART_HOLD_DAY.getValue();
+
+ /**
+ * Layout pattern
+ */
+ private String logLayOutPattern = StreamJobLogConfig.BUCKET_LAYOUT.getValue();
+
+    public Class<? extends JobLogBucket> getBucketClass() {
+ return bucketClass;
+ }
+
+    public void setBucketClass(Class<? extends JobLogBucket> bucketClass) {
+ this.bucketClass = bucketClass;
+ }
+
+    public Map<String, Object> getAttributes() {
+ return attributes;
+ }
+
+    public void setAttributes(Map<String, Object> attributes) {
+ this.attributes = attributes;
+ }
+
+ public long getMaxBucketActivePartSize() {
+ return maxBucketActivePartSize;
+ }
+
+ public void setMaxBucketActivePartSize(long maxBucketActivePartSize) {
+ this.maxBucketActivePartSize = maxBucketActivePartSize;
+ }
+
+ public String getBucketPartCompress() {
+ return bucketPartCompress;
+ }
+
+ public void setBucketPartCompress(String bucketPartCompress) {
+ this.bucketPartCompress = bucketPartCompress;
+ }
+
+ public int getBucketPartHoldTimeInDay() {
+ return bucketPartHoldTimeInDay;
+ }
+
+ public void setBucketPartHoldTimeInDay(int bucketPartHoldTimeInDay) {
+ this.bucketPartHoldTimeInDay = bucketPartHoldTimeInDay;
+ }
+
+ public String getLogLayOutPattern() {
+ return logLayOutPattern;
+ }
+
+ public void setLogLayOutPattern(String logLayOutPattern) {
+ this.logLayOutPattern = logLayOutPattern;
+ }
+
+    public static final class Define{
+        private Define(){}
+        /**
+         * Default bucket class
+         */
+        public static final CommonVars<String> JOB_LOG_BUCKET_CLASS = CommonVars.apply("wds.streamis.job.log.bucket.class", "com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.Log4j2JobLogBucket");
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketDriftPolicy.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketDriftPolicy.java
new file mode 100644
index 000000000..147f8fafe
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketDriftPolicy.java
@@ -0,0 +1,15 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContext;
+
+/**
+ * Drift policy
+ */
+public interface JobLogBucketDriftPolicy {
+ /**
+     * Decide whether the bucket should be drifted to another storage context
+     * @param bucket bucket
+     * @param contexts candidate storage contexts
+     * @return true if the bucket should be drifted
+ */
+ boolean onPolicy(JobLogBucket bucket, JobLogStorageContext[] contexts);
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketFactory.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketFactory.java
new file mode 100644
index 000000000..d4b9b6b2a
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketFactory.java
@@ -0,0 +1,15 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket;
+
+/**
+ * Factory of creating job log bucket
+ */
+public interface JobLogBucketFactory {
+
+ /**
+ * Create bucket
+ * @param jobName job name
+ * @param config bucket config
+     * @return bucket instance
+ */
+ JobLogBucket createBucket(String jobName, JobLogBucketConfig config);
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketState.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketState.java
new file mode 100644
index 000000000..8051e6d13
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogBucketState.java
@@ -0,0 +1,31 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket;
+
+/**
+ * State of log bucket
+ */
+public interface JobLogBucketState {
+
+ /**
+ * Bucket path
+ * @return path
+ */
+ String getBucketPath();
+
+ /**
+ * Write rate
+ * @return rate
+ */
+ double getBucketWriteRate();
+
+ /**
+ * Bucket parts
+ * @return number
+ */
+ int getBucketParts();
+
+ /**
+     * Last write time
+ * @return time
+ */
+ long getBucketWriteTime();
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogStorageWriter.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogStorageWriter.java
new file mode 100644
index 000000000..772040374
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/JobLogStorageWriter.java
@@ -0,0 +1,27 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement;
+
+/**
+ * Storage writer for job log
+ */
+public interface JobLogStorageWriter {
+
+ /**
+ * Write log element
+     * @param logEl log element
+ */
+ void write(LogElement logEl);
+
+ /**
+ * Write log line
+ * @param logLine log line
+ */
+ void write(String logLine);
+
+ /**
+ * Close log storage
+ */
+ void close();
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/Log4j2JobLogBucket.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/Log4j2JobLogBucket.java
new file mode 100644
index 000000000..186377e31
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/Log4j2JobLogBucket.java
@@ -0,0 +1,353 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.entities.LogElement;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContext;
+import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.linkis.common.conf.CommonVars;
+import org.apache.logging.log4j.Level;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.core.Appender;
+import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.appender.RollingFileAppender;
+import org.apache.logging.log4j.core.appender.rolling.*;
+import org.apache.logging.log4j.core.appender.rolling.action.*;
+import org.apache.logging.log4j.core.config.AppenderRef;
+import org.apache.logging.log4j.core.config.Configuration;
+import org.apache.logging.log4j.core.config.LoggerConfig;
+import org.apache.logging.log4j.core.layout.PatternLayout;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.util.Optional;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.locks.Condition;
+import java.util.concurrent.locks.ReentrantLock;
+
+/**
+ * Job log bucket for log4j
+ */
+public class Log4j2JobLogBucket implements JobLogBucket{
+
+ private static final org.slf4j.Logger LOG = LoggerFactory.getLogger(Log4j2JobLogBucket.class);
+
+ private static final String DEFAULT_FILE_PATTERN_SUFFIX = ".%d{yyyy-MM-dd}-%i";
+
+    private static final CommonVars<Integer> ROLLOVER_MAX = CommonVars.apply("wds.stream.job.log.storage.bucket.log4j.rollover-max", 20);
+ /**
+ * Bucket name
+ */
+ private final String bucketName;
+
+ /**
+ * Logger name
+ */
+ private final String loggerName;
+ /**
+ * Logger context
+ */
+ private final LoggerContext loggerContext;
+
+ /**
+ * Logger entity
+ */
+ private final Logger logger;
+
+ /**
+ * Storage context
+ */
+ private final JobLogStorageContext storageContext;
+ /**
+ * Storage writer
+ */
+ private final JobLogStorageWriter jobLogStorageWriter;
+
+ /**
+ * Bucket state
+ */
+ private final JobLogBucketState jobLogBucketState;
+
+ /**
+     * Last write time
+ */
+ private long lastWriteTime;
+
+ /**
+ * Prev Interval time
+ */
+ private long preIntervalTime;
+
+ /**
+ * Active thread
+ */
+ private final AtomicLong activeThread = new AtomicLong(0);
+ /**
+ * Interval counter
+ */
+ private final AtomicLong intervalCounter = new AtomicLong(0);
+
+ /**
+ * Shutdown flag
+ */
+ private final AtomicBoolean isShutdown = new AtomicBoolean(false);
+
+ /**
+ * Shutdown lock
+ */
+ private final ReentrantLock shutdownLock = new ReentrantLock();
+
+ /**
+ * Shutdown condition
+ */
+ private final Condition canShutdown = shutdownLock.newCondition();
+ /**
+ * Store the write rate
+ */
+ private double writeRate;
+ public Log4j2JobLogBucket(String bucketName, JobLogStorageContext storageContext, JobLogBucketConfig config){
+ this.bucketName = bucketName;
+ // Build unique logger name
+ this.loggerName = bucketName + System.currentTimeMillis() + "_" + Thread.currentThread().getId();
+ this.storageContext = storageContext;
+ // Create logger context
+ this.loggerContext = (LoggerContext) LogManager.getContext(false);
+ this.logger = initLogger(this.bucketName, this.loggerName, this.storageContext, config, this.loggerContext);
+ this.jobLogStorageWriter = createStorageWriter();
+ this.jobLogBucketState = createBucketState();
+ }
+ @Override
+ public JobLogBucketState getBucketState() {
+ return this.jobLogBucketState;
+ }
+
+ @Override
+ public JobLogStorageWriter getBucketStorageWriter() {
+ return this.jobLogStorageWriter;
+ }
+
+ @Override
+ public JobLogStorageContext getStorageContext() {
+ return this.storageContext;
+ }
+
+ @Override
+ public String getBucketName() {
+ return this.bucketName;
+ }
+
+ @Override
+ public void close() {
+ this.isShutdown.set(true);
+ this.shutdownLock.lock();
+ try{
+ if (activeThread.get() > 0 && !this.canShutdown.await(5, TimeUnit.SECONDS)) {
+ LOG.warn("Shutdown the bucket: [{}] directly because the timeout of waiting", bucketName);
+ }
+ } catch (InterruptedException e) {
+ // Ignore
+
+ } finally {
+ this.shutdownLock.unlock();
+ }
+ Configuration log4jConfig = this.loggerContext.getConfiguration();
+ // First to stop appender
+ log4jConfig.getAppender(this.loggerName).stop();
+ log4jConfig.getLoggerConfig(this.loggerName).removeAppender(this.loggerName);
+ log4jConfig.removeLogger(this.loggerName);
+ loggerContext.updateLoggers();
+ }
+
+ private synchronized Logger initLogger(String bucketName, String loggerName,
+ JobLogStorageContext storageContext, JobLogBucketConfig config, LoggerContext loggerContext){
+ Configuration log4jConfig = loggerContext.getConfiguration();
+ String fileName = resolveFileName(storageContext.getStorePath().toString(), bucketName);
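+        // Each bucket registers its own RollingFileAppender and LoggerConfig under a unique
+        // logger name, so buckets roll and close independently of the server's own logging.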
+ RollingFileAppender appender = RollingFileAppender.newBuilder()
+ .setLayout(PatternLayout.newBuilder().withPattern(config.getLogLayOutPattern()).build())
+ .setName(loggerName)
+// .withFileOwner()
+ .withFileName(fileName)
+ .withFilePattern(resolveFilePattern(fileName, config.getBucketPartCompress()))
+ .withPolicy(SizeBasedTriggeringPolicy.createPolicy(config.getMaxBucketActivePartSize() + "MB"))
+ .withStrategy(createRolloverStrategy(log4jConfig, fileName, ROLLOVER_MAX.getValue(), config.getBucketPartHoldTimeInDay()))
+ .setConfiguration(log4jConfig)
+ .build();
+ appender.start();
+ log4jConfig.addAppender(appender);
+ LoggerConfig loggerConfig = LoggerConfig.newBuilder().withAdditivity(false).withLevel(Level.ALL)
+ .withRefs(new AppenderRef[]{
+ AppenderRef.createAppenderRef(loggerName, null, null)
+ })
+ .withLoggerName(loggerName).withConfig(log4jConfig).build();
+ loggerConfig.addAppender(appender, null, null);
+ log4jConfig.addLogger(loggerName, loggerConfig);
+ // Should we update the logger context ?
+ loggerContext.updateLoggers();
+ return loggerContext.getLogger(loggerName);
+ }
+
+ /**
+ * Create storage writer
+ * @return storage writer
+ */
+ private JobLogStorageWriter createStorageWriter(){
+ return new JobLogStorageWriter() {
+ @Override
+ public void write(LogElement logEl) {
+ activeThread.incrementAndGet();
+ try {
+ String[] contents = logEl.getContents();
+ if (null != contents) {
+ for (String content : contents) {
+ write(content, true);
+ }
+ }
+ }finally {
+ if (activeThread.decrementAndGet() <= 0 && isShutdown.get()){
+ notifyShutdown();
+ }
+ }
+ }
+
+ @Override
+ public void write(String logLine) {
+ activeThread.incrementAndGet();
+ try {
+ write(logLine, false);
+ }finally {
+ if (activeThread.decrementAndGet() <= 0 && isShutdown.get()){
+ notifyShutdown();
+ }
+ }
+ }
+
+ private void write(String logLine, boolean batch){
+ logger.info(logLine);
+ long currentTime = System.currentTimeMillis();
+ long intervalCnt = intervalCounter.getAndIncrement();
+ long intervalTime = (currentTime - preIntervalTime)/1000;
+                // Accumulate the write rate roughly once per minute
+ if ( intervalTime >= 60){
+ writeRate = (double)intervalCnt / (double)intervalTime;
+ preIntervalTime = currentTime;
+ intervalCounter.set(0);
+ }
+ lastWriteTime = currentTime;
+ }
+ @Override
+ public void close() {
+ // Ignore
+ }
+ };
+ }
+
+ /**
+ * Create bucket state
+ * @return bucket state
+ */
+ private JobLogBucketState createBucketState(){
+ return new JobLogBucketState() {
+ private String bucketPath;
+ @Override
+ public String getBucketPath() {
+ if (StringUtils.isBlank(bucketPath)) {
+ Appender appender = loggerContext.getConfiguration().getAppender(loggerName);
+ if (appender instanceof RollingFileAppender) {
+ bucketPath = new File(((RollingFileAppender) appender).getFileName()).getParent();
+ }
+ }
+ return this.bucketPath;
+ }
+
+ @Override
+ public double getBucketWriteRate() {
+ return writeRate;
+ }
+
+ @Override
+ public int getBucketParts() {
+ AtomicInteger parts = new AtomicInteger(-1);
+ String path = getBucketPath();
+ if (StringUtils.isNotBlank(path)){
+ Optional.ofNullable(new File(path).list()).ifPresent(list -> parts.set(list.length));
+ }
+ return parts.get();
+ }
+
+ @Override
+ public long getBucketWriteTime() {
+ return lastWriteTime;
+ }
+ };
+ }
+
+ private synchronized void notifyShutdown(){
+ this.shutdownLock.lock();
+ try{
+            this.canShutdown.signalAll();
+ }finally {
+ this.shutdownLock.unlock();
+ }
+ }
+ /**
+ * Create rollover strategy
+ * @param configuration configuration
+ * @param fileName file name
+     * @param rolloverMax rollover max in file pattern
+ * @param fileHoldDay file hold day time
+ * @return strategy
+ */
+ private RolloverStrategy createRolloverStrategy(Configuration configuration,
+ String fileName, int rolloverMax, int fileHoldDay){
+ DefaultRolloverStrategy.Builder builder = DefaultRolloverStrategy.newBuilder();
+ if (rolloverMax > 0){
+ builder.withMax(rolloverMax + "");
+ }
+ if (fileHoldDay > 0){
+ // Create the actions to delete old file
+ builder.withCustomActions(new Action[]{
+ DeleteAction.createDeleteAction(new File(fileName).getParent(), false, 2, false, null,
+ new PathCondition[]{
+ IfFileName.createNameCondition(null, ".*"),
+ IfLastModified.createAgeCondition(Duration.parse(fileHoldDay + "d"))
+ },
+ null, configuration)
+ }
+ );
+ }
+ return builder.build();
+ }
+ /**
+ * Ex: /data/stream/log/hadoop/{projectName}/{jobName}/{projectName}.{jobName}.log
+ * @param bucketRootPath bucket root path
+ * @param bucketName bucket name
+ * @return file name with absolute path
+ */
+ private String resolveFileName(String bucketRootPath, String bucketName){
+ String fileName = FilenameUtils.normalize(bucketName);
+ String basePath = bucketRootPath;
+ if (!basePath.endsWith("/")){
+ basePath += "/";
+ }
+ basePath += fileName.replace(".", "/");
+ return basePath + "/" + fileName.substring(bucketName.indexOf(".") + 1) + ".log";
+ }
+
+ /**
+ * Resolve file pattern
+ * @param fileName file name
+ * @param format format
+ * @return file pattern
+ */
+ private String resolveFilePattern(String fileName, String format){
+ String filePattern = fileName + Log4j2JobLogBucket.DEFAULT_FILE_PATTERN_SUFFIX;
+ if (StringUtils.isNotBlank(format)){
+            filePattern = filePattern + (format.startsWith(".") ? format : "." + format);
+ }
+ return filePattern;
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/StorageThresholdDriftPolicy.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/StorageThresholdDriftPolicy.java
new file mode 100644
index 000000000..608faa75e
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/bucket/StorageThresholdDriftPolicy.java
@@ -0,0 +1,23 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContext;
+
+public class StorageThresholdDriftPolicy implements JobLogBucketDriftPolicy{
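+
+    // Drift when the context backing this bucket is saturated (weight <= 0) while at least
+    // one other context remains healthy; the storage then closes the bucket, and the next
+    // write re-creates it on a context chosen by the load balancers.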
+ @Override
+ public boolean onPolicy(JobLogBucket bucket, JobLogStorageContext[] contexts) {
+ JobLogStorageContext bucketContext = bucket.getStorageContext();
+ // Means that the storage context is not healthy
+ if (bucketContext.getStoreWeight() <= 0){
+ // Find the available context
+ boolean hasRest = false;
+ for(JobLogStorageContext context : contexts){
+ if (context.getStoreWeight() > 0){
+ hasRest = true;
+ break;
+ }
+ }
+ return hasRest;
+ }
+ return false;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextDownEvent.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextDownEvent.java
new file mode 100644
index 000000000..c1964376e
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextDownEvent.java
@@ -0,0 +1,20 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context;
+
+/**
+ * Signals that the storage context has gone down
+ */
+public class ContextDownEvent implements JobLogStorageContextListener.ContextEvent {
+
+ /**
+ * Context id
+ */
+ private final String contextId;
+
+ public ContextDownEvent(String contextId){
+ this.contextId = contextId;
+ }
+
+ public String getContextId() {
+ return contextId;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextLaunchEvent.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextLaunchEvent.java
new file mode 100644
index 000000000..59de63d6e
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextLaunchEvent.java
@@ -0,0 +1,23 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context;
+
+import java.util.List;
+
+/**
+ * Signals that the storage contexts have been launched
+ */
+public class ContextLaunchEvent implements JobLogStorageContextListener.ContextEvent {
+
+ /**
+ * Storage contexts
+ */
+    private final List<JobLogStorageContext> contexts;
+
+    public ContextLaunchEvent(List<JobLogStorageContext> contexts){
+ this.contexts = contexts;
+ }
+
+    public List<JobLogStorageContext> getContextList() {
+ return contexts;
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextRefreshAllEvent.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextRefreshAllEvent.java
new file mode 100644
index 000000000..b585e5718
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/ContextRefreshAllEvent.java
@@ -0,0 +1,8 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context;
+
+/**
+ * Just a signal to refresh all the storage contexts
+ */
+public class ContextRefreshAllEvent implements JobLogStorageContextListener.ContextEvent {
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/JobLogStorageContext.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/JobLogStorageContext.java
new file mode 100644
index 000000000..9ffd95226
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/JobLogStorageContext.java
@@ -0,0 +1,144 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.exception.StreamJobLogException;
+
+import java.io.IOException;
+import java.nio.file.*;
+import java.nio.file.attribute.PosixFilePermissions;
+import java.util.UUID;
+
+/**
+ * Storage context (represents a drive/disk)
+ */
+public class JobLogStorageContext{
+
+ /**
+ * Context id
+ */
+ private final String id;
+
+ /**
+ * Store path
+ */
+ private final Path storePath;
+
+ /**
+ * Store information
+ */
+ private final FileStore storeInfo;
+ /**
+ * Score of storage context
+ */
+ private final double score;
+
+ /**
+ * Storage weight
+ */
+ private double storeWeight;
+
+ public JobLogStorageContext(String path, double score){
+ this.id = UUID.randomUUID().toString();
+ this.storePath = Paths.get(path);
+ this.storeInfo = initStorePath(this.storePath);
+ this.score = score;
+ }
+
+
+ private FileStore initStorePath(Path path){
+ if (Files.notExists(path)){
+ try {
+ Files.createDirectories(this.storePath,
+ PosixFilePermissions.asFileAttribute(PosixFilePermissions.fromString("rwxrwxr--")));
+ } catch (IOException e) {
+ throw new StreamJobLogException.Runtime(-1,
+ "Cannot make the storage path directory: [" + path + "], message: " + e.getMessage());
+ }
+ // Allow dir link
+ } else if (!Files.isDirectory(path)){
+ throw new StreamJobLogException.Runtime(-1,
+ "the storage path: [" + path + "] is not directory" );
+ }
+ try {
+ return Files.getFileStore(path);
+ } catch (IOException e) {
+ throw new StreamJobLogException.Runtime(-1,
+ "Fail to get the storage information in path: [" + path + "], message: " + e.getMessage());
+ }
+ }
+
+ public Path getStorePath() {
+ return storePath;
+ }
+
+ /**
+ * Score
+ * @return score value
+ */
+ public double getScore() {
+ return score;
+ }
+
+ public String getId() {
+ return id;
+ }
+
+ /**
+ * Total space
+     * @return size in bytes
+     * @throws IOException on storage failure
+ */
+ public long getTotalSpace() throws IOException {
+ long result = storeInfo.getTotalSpace();
+ if (result < 0){
+ result = Long.MAX_VALUE;
+ }
+ return result;
+ }
+
+ /**
+ * Usable space
+     * @return size in bytes
+     * @throws IOException on storage failure
+ */
+ public long getUsableSpace() throws IOException {
+ long result = storeInfo.getUsableSpace();
+ if (result < 0){
+ result = Long.MAX_VALUE;
+ }
+ return result;
+ }
+
+ /**
+ * Unallocated space
+     * @return size in bytes
+     * @throws IOException on storage failure
+ */
+ public long getUnallocatedSpace() throws IOException{
+ long result = storeInfo.getUnallocatedSpace();
+ if (result < 0){
+ result = Long.MAX_VALUE;
+ }
+ return result;
+ }
+
+ public double getStoreWeight() {
+ return storeWeight;
+ }
+
+ public void setStoreWeight(double storeWeight) {
+ this.storeWeight = storeWeight;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o instanceof JobLogStorageContext){
+ return this.id.equals(((JobLogStorageContext) o).id);
+ }
+ return super.equals(o);
+ }
+
+ @Override
+ public int hashCode() {
+ return this.id.hashCode();
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/JobLogStorageContextListener.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/JobLogStorageContextListener.java
new file mode 100644
index 000000000..77432a2b2
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/context/JobLogStorageContextListener.java
@@ -0,0 +1,17 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context;
+
+/**
+ * Context listener
+ */
+public interface JobLogStorageContextListener {
+
+ /**
+ * Listen the context event
+ * @param event event
+ */
+ void onContextEvent(ContextEvent event);
+
+ interface ContextEvent{
+
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/loadbalancer/JobLogStorageLoadBalancer.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/loadbalancer/JobLogStorageLoadBalancer.java
new file mode 100644
index 000000000..06d6186d7
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/loadbalancer/JobLogStorageLoadBalancer.java
@@ -0,0 +1,27 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContext;
+
+public interface JobLogStorageLoadBalancer {
+ /**
+ * Init method
+ */
+ void init();
+
+ /**
+     * Priority order (a higher value is consulted first)
+ * @return priority value
+ */
+ default int priority(){
+ return -1;
+ }
+
+ /**
+ * Choose storage context
+ * @param bucketName bucket name
+ * @param config bucket config
+     * @return storage context, or null if none can be chosen
+ */
+ JobLogStorageContext chooseContext(String bucketName, JobLogBucketConfig config);
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/loadbalancer/RoundRobinLoadBalancer.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/loadbalancer/RoundRobinLoadBalancer.java
new file mode 100644
index 000000000..ef12087cb
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/loadbalancer/RoundRobinLoadBalancer.java
@@ -0,0 +1,199 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.List;
+import java.util.concurrent.locks.ReentrantLock;
+
+
+/**
+ * Round-robin load balancer
+ */
+public class RoundRobinLoadBalancer implements JobLogStorageLoadBalancer, JobLogStorageContextListener {
+
+ private static final Logger LOG = LoggerFactory.getLogger(RoundRobinLoadBalancer.class);
+
+ /**
+ * Candidate array
+ */
+ private StorageContextInfo[] candidates = new StorageContextInfo[0];
+
+ /**
+ * Lock for candidate array
+ */
+ private final ReentrantLock candidateLock = new ReentrantLock();
+ @Override
+ public void onContextEvent(ContextEvent event) {
+ if (event instanceof ContextLaunchEvent){
+ onLaunchContexts(((ContextLaunchEvent) event).getContextList());
+ } else if (event instanceof ContextDownEvent){
+ onDownContext(((ContextDownEvent) event).getContextId());
+ } else if (event instanceof ContextRefreshAllEvent){
+ onRefreshAllContext();
+ }
+ }
+
+ @Override
+ public int priority() {
+ return Integer.MAX_VALUE;
+ }
+
+ @Override
+ public void init() {
+ //init
+ }
+
+ @Override
+ public JobLogStorageContext chooseContext(String bucketName, JobLogBucketConfig config) {
+ updateCandidateContextWeight();
+ candidateLock.lock();
+ try {
+ int index = selectContext(candidates);
+ if (index >= 0){
+ StorageContextInfo info = this.candidates[index];
+ info.cwt = info.cwt -1;
+ LOG.info("Round-Robin chosen context: {} for bucket: {}", info.context.getStorePath(), bucketName);
+ return info.context;
+ }
+ }finally {
+ candidateLock.unlock();
+ }
+ return null;
+ }
+
+ private static class StorageContextInfo{
+
+ /**
+ * Storage context
+ */
+ final JobLogStorageContext context;
+
+ /**
+ * If the context is working
+ */
+ boolean online = true;
+
+ /**
+ * Weight value
+ */
+ int wt;
+
+ /**
+ * Dynamic weight
+ */
+ int cwt;
+
+ public StorageContextInfo(JobLogStorageContext context){
+ this.context = context;
+ this.wt = (int)Math.floor(context.getStoreWeight());
+ this.cwt = wt;
+ }
+
+ public void refreshWeight(){
+ this.wt = (int)Math.floor(context.getStoreWeight());
+ if (this.cwt > this.wt){
+ this.cwt = this.wt;
+ }
+ }
+ }
+
+ /**
+ * Select context
+ * @param infoArray info array
+ * @return index
+ */
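+    // Weighted round-robin selection: scan the online candidates pairwise and keep the one
+    // whose remaining dynamic weight (cwt) is proportionally larger relative to its static
+    // weight (wt); if every online candidate has exhausted its cwt, reset all cwt values
+    // back to wt and retry once before returning -1.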
+ private int selectContext(StorageContextInfo[] infoArray){
+ int u = 0;
+ int reset = -1;
+ while (true){
+ for (int i = 0; i < infoArray.length; i ++){
+ if (!infoArray[i].online || infoArray[i].cwt <= 0){
+ continue;
+ }
+ u = i;
+ while (i < infoArray.length - 1){
+ i ++;
+ if (!infoArray[i].online || infoArray[i].cwt <= 0){
+ continue;
+ }
+ if ((infoArray[u].wt * 1000 / infoArray[i].wt <
+ infoArray[u].cwt * 1000 / infoArray[i].cwt)){
+ return u;
+ }
+ u = i;
+ }
+ return u;
+ }
+ if (++reset > 0){
+ return -1;
+ }
+ for (StorageContextInfo info : infoArray){
+ info.cwt = info.wt;
+ }
+ }
+
+ }
+ /**
+ * Enlarge the candidate array of context info
+ * @param contexts context list
+ */
+    private void onLaunchContexts(List<JobLogStorageContext> contexts){
+        if (!contexts.isEmpty()){
+ candidateLock.lock();
+ try{
+ StorageContextInfo[] source = candidates;
+ int newSize = source.length + contexts.size();
+ StorageContextInfo[] dest = new StorageContextInfo[newSize];
+ System.arraycopy(source, 0, dest, 0, source.length);
+ int offset = source.length;
+ for(JobLogStorageContext context : contexts){
+ dest[offset++] = new StorageContextInfo(context);
+ }
+ this.candidates = dest;
+ }finally {
+ candidateLock.unlock();
+ }
+ }
+ }
+
+ /**
+ * Mark the context has been downed
+ * @param contextId context id
+ */
+ private void onDownContext(String contextId){
+        // Lock the candidate array while marking the context offline
+ candidateLock.lock();
+ try{
+ for (StorageContextInfo info : candidates) {
+ if (contextId.equals(info.context.getId())) {
+ info.online = false;
+ return;
+ }
+ }
+ } finally {
+ candidateLock.unlock();
+ }
+ }
+
+ /**
+ * Refresh all the context
+ */
+ private void onRefreshAllContext(){
+ candidateLock.lock();
+ try{
+ // Update the dynamic weight
+ for (StorageContextInfo info : candidates) {
+ info.refreshWeight();
+ }
+ } finally {
+ candidateLock.unlock();
+ }
+ }
+ private void updateCandidateContextWeight(){
+ // Empty method
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/loadbalancer/SimpleLoadBalancer.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/loadbalancer/SimpleLoadBalancer.java
new file mode 100644
index 000000000..42b20133e
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/loadbalancer/SimpleLoadBalancer.java
@@ -0,0 +1,51 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.ContextDownEvent;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.ContextLaunchEvent;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContext;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContextListener;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.security.SecureRandom;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Simple load balancer
+ */
+public class SimpleLoadBalancer implements JobLogStorageLoadBalancer, JobLogStorageContextListener {
+
+ private static final Logger LOG = LoggerFactory.getLogger(SimpleLoadBalancer.class);
+
+    private final List<JobLogStorageContext> contexts = new ArrayList<>();
+
+ private final SecureRandom random = new SecureRandom();
+ @Override
+ public void onContextEvent(ContextEvent event) {
+ if (event instanceof ContextLaunchEvent){
+ contexts.addAll(((ContextLaunchEvent) event).getContextList());
+ } else if (event instanceof ContextDownEvent){
+ contexts.removeIf(context -> context.getId().equals(((ContextDownEvent) event).getContextId()));
+ }
+ }
+
+ @Override
+ public void init() {
+ //init
+ }
+
+ @Override
+ public JobLogStorageContext chooseContext(String bucketName, JobLogBucketConfig config) {
+ JobLogStorageContext context = randomSelectContext(this.contexts);
+ if (null != context){
+ LOG.info("Random chosen context: {} for bucket: {}", context.getStorePath(), bucketName);
+ }
+ return context;
+ }
+
+    private JobLogStorageContext randomSelectContext(List<JobLogStorageContext> candidates){
+        // Guard against an empty list: random.nextInt(0) would throw IllegalArgumentException
+        if (candidates.isEmpty()){
+            return null;
+        }
+        return candidates.get(random.nextInt(candidates.size()));
+    }
+}
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/utils/MemUtils.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/utils/MemUtils.java
new file mode 100644
index 000000000..8ef2f3ec6
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/log/server/storage/utils/MemUtils.java
@@ -0,0 +1,234 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server.storage.utils;
+
+import org.apache.commons.lang.StringUtils;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Mem utils
+ */
+public class MemUtils {
+    private static final Map<String, StoreUnit> UNIT_MAP = new HashMap<>();
+ static{
+ UNIT_MAP.put("G", StoreUnit.GB);
+ UNIT_MAP.put("GB", StoreUnit.GB);
+ UNIT_MAP.put("B", StoreUnit.B);
+ UNIT_MAP.put("M", StoreUnit.MB);
+ UNIT_MAP.put("MB", StoreUnit.MB);
+ UNIT_MAP.put("K", StoreUnit.KB);
+ UNIT_MAP.put("KB", StoreUnit.KB);
+ }
+ public static long convertToGB(long size, String unitFlag){
+ if(size < 0){
+ return -1L;
+ }
+ if(StringUtils.isNotBlank(unitFlag)){
+ StoreUnit storeUnit = UNIT_MAP.get(unitFlag.trim().toUpperCase());
+ if(null != storeUnit){
+ return storeUnit.toGB(size);
+ }
+ }
+ return -1L;
+ }
+
+ public static long convertToMB(long size, String unitFlag){
+ if(size < 0){
+ return -1L;
+ }
+ if(StringUtils.isNotBlank(unitFlag)){
+ StoreUnit storeUnit = UNIT_MAP.get(unitFlag.trim().toUpperCase());
+ if(null != storeUnit){
+ return storeUnit.toMB(size);
+ }
+ }
+ return -1L;
+ }
+
+ public static long convertToByte(long size, String unitFlag){
+ if(size < 0){
+ return -1L;
+ }
+ if(StringUtils.isNotBlank(unitFlag)){
+ StoreUnit storeUnit = UNIT_MAP.get(unitFlag.trim().toUpperCase());
+ if(null != storeUnit){
+ return storeUnit.toB(size);
+ }
+ }
+ return -1L;
+ }
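+
+    // e.g. convertToGB(32212254720L, "B") == 30 and convertToMB(2048L, "K") == 2;
+    // an unknown unit flag or a negative size yields -1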
+ public enum StoreUnit {
+ /**
+ * byte
+ */
+ B {
+ @Override
+ public long toB(long s){
+ return s;
+ }
+
+ @Override
+ public long toKB(long s){
+ return s/(C1/C0);
+ }
+
+ @Override
+ public long toMB(long s) {
+ return s/(C2/C0);
+ }
+
+ @Override
+ public long toGB(long s) {
+ return s/(C3/C0);
+ }
+
+ @Override
+ public long toTB(long s) {
+ return s/(C4/C0);
+ }
+ },
+ /**
+ * kb
+ */
+ KB{
+ @Override
+ public long toB(long s){
+ return x(s, C1/C0, Long.MAX_VALUE/(C1/C0));
+ }
+
+ @Override
+ public long toKB(long s){
+ return s;
+ }
+
+ @Override
+ public long toMB(long s) {
+ return s/(C2/C1);
+ }
+
+ @Override
+ public long toGB(long s) {
+ return s/(C3/C1);
+ }
+
+ @Override
+ public long toTB(long s) {
+ return s/(C4/C1);
+ }
+ },
+ MB{
+ @Override
+ public long toB(long s){
+ return x(s, C2/C0, Long.MAX_VALUE/(C2/C0));
+ }
+
+ @Override
+ public long toKB(long s){
+ return x(s, C2/C1, Long.MAX_VALUE/(C2/C1));
+ }
+
+ @Override
+ public long toMB(long s) {
+ return s;
+ }
+
+ @Override
+ public long toGB(long s) {
+ return s/(C3/C2);
+ }
+
+ @Override
+ public long toTB(long s) {
+ return s/(C4/C2);
+ }
+ },
+ GB{
+ @Override
+ public long toB(long s){
+ return x(s, C3/C0, Long.MAX_VALUE/(C3/C0));
+ }
+
+ @Override
+ public long toKB(long s){
+ return x(s, C3/C1, Long.MAX_VALUE/(C3/C1));
+ }
+
+ @Override
+ public long toMB(long s) {
+ return x(s, C3/C2, Long.MAX_VALUE/(C3/C2));
+ }
+
+ @Override
+ public long toGB(long s) {
+ return s;
+ }
+
+ @Override
+ public long toTB(long s) {
+ return s/(C4/C3);
+ }
+ },
+ TB{
+ @Override
+ public long toB(long s){
+ return x(s, C4/C0, Long.MAX_VALUE/(C4/C0));
+ }
+
+ @Override
+ public long toKB(long s){
+ return x(s, C4/C1, Long.MAX_VALUE/(C4/C1));
+ }
+
+ @Override
+ public long toMB(long s) {
+ return x(s, C4/C2, Long.MAX_VALUE/(C4/C2));
+ }
+
+ @Override
+ public long toGB(long s) {
+ return x(s, C4/C3, Long.MAX_VALUE/(C4/C3));
+ }
+
+ @Override
+ public long toTB(long s) {
+ return s;
+ }
+ };
+
+ public long toB(long s){
+ throw new AbstractMethodError();
+ }
+
+ public long toKB(long s){
+ throw new AbstractMethodError();
+ }
+
+ public long toMB(long s){
+ throw new AbstractMethodError();
+ }
+
+ public long toGB(long s){
+ throw new AbstractMethodError();
+ }
+
+ public long toTB(long s){
+ throw new AbstractMethodError();
+ }
+ }
+
+ static long x(long d, long m, long over){
+ if(d > over){
+ return Long.MAX_VALUE;
+ }
+ if(d < -over){
+ return Long.MIN_VALUE;
+ }
+ return d * m;
+ }
+ static final long C0 = 1L;
+ static final long C1 = C0 * 1024L;
+ static final long C2 = C1 * 1024L;
+ static final long C3 = C2 * 1024L;
+ static final long C4 = C3 * 1024L;
+
+}
\ No newline at end of file
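Note (not part of the patch): a quick sanity check of the helpers above; the expected values follow directly from the constants C0..C4 and the saturating multiply x(...).

    long gb  = MemUtils.convertToGB(2048L, "MB");   // 2
    long mb  = MemUtils.convertToMB(3L, "G");       // 3072
    long b   = MemUtils.convertToByte(1L, "KB");    // 1024
    long bad = MemUtils.convertToGB(10L, "PB");     // -1, unit not registered
    long sat = MemUtils.convertToByte(Long.MAX_VALUE / 2, "TB");  // Long.MAX_VALUE (saturated)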
diff --git a/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/server/JobLogStorageTest.java b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/server/JobLogStorageTest.java
new file mode 100644
index 000000000..fb3d600a1
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-log/streamis-job-log-server/src/test/java/com/webank/wedatasphere/streamis/jobmanager/log/server/JobLogStorageTest.java
@@ -0,0 +1,104 @@
+package com.webank.wedatasphere.streamis.jobmanager.log.server;
+
+import com.webank.wedatasphere.streamis.jobmanager.log.server.config.StreamJobLogConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.exception.StreamJobLogException;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.JobLogStorage;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.StreamisJobLogStorage;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucket;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.JobLogBucketConfig;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.bucket.StorageThresholdDriftPolicy;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.context.JobLogStorageContext;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer.RoundRobinLoadBalancer;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.loadbalancer.SimpleLoadBalancer;
+import com.webank.wedatasphere.streamis.jobmanager.log.server.storage.utils.MemUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.linkis.common.conf.BDPConfiguration;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.net.URL;
+import java.util.Objects;
+
+public class JobLogStorageTest {
+
+ private static final Logger logger = LoggerFactory.getLogger(JobLogStorageTest.class);
+ @Test
+ public void storageContext() throws IOException {
+ URL url = JobLogStorageTest.class.getResource("/");
+ if (null != url){
+ JobLogStorageContext context = new JobLogStorageContext(url.getPath(), 1.0d);
+ logger.info("disk total(bytes): " + context.getTotalSpace());
+ logger.info("disk total(gb): " + MemUtils.convertToGB(context.getTotalSpace(), "B"));
+ logger.info("disk usable(bytes): " + context.getUsableSpace());
+ logger.info("disk usable(gb): " + MemUtils.convertToGB(context.getUsableSpace(), "B"));
+ }
+ }
+ @Test
+ public void calculateWeight() throws IOException {
+ JobLogStorageContext candidate1 = new JobLogStorageContext(Objects.requireNonNull(JobLogStorage.class.getResource("/"))
+ .getPath(), 1.0d);
+ JobLogStorageContext candidate2 = new JobLogStorageContext(Objects.requireNonNull(JobLogStorage.class.getResource("/"))
+ .getPath(), 1.0d);
+ JobLogStorageContext[] contexts = new JobLogStorageContext[]{candidate1, candidate2};
+ double[] weights = new double[contexts.length];
+ int maxNormalizeWt = StreamJobLogConfig.STORAGE_CONTEXT_MAX_WEIGHT.getValue();
+ double storageThreshold = StreamJobLogConfig.STORAGE_THRESHOLD.getValue();
+ if (maxNormalizeWt < 1){
+ maxNormalizeWt = 1;
+ }
+ double maxWeight = Double.MIN_VALUE;
+ double minWeight = Double.MAX_VALUE;
+ int i = 0;
+ for (; i < weights.length; i++) {
+ JobLogStorageContext context = contexts[i];
+ long usableSpace = context.getUsableSpace();
+ long totalSpace = context.getTotalSpace();
+ double usage = (double)(totalSpace - usableSpace) / (double)totalSpace;
+ double weight = 0d;
+ if (usage >= storageThreshold){
+ logger.info("The usage of storage context:[{}] reach the threshold: {} > {}, set the weight of it to 0",
+ context.getStorePath(), usage, storageThreshold);
+ } else {
+ long freeSpaceInGB = MemUtils.convertToGB(usableSpace, "B");
+ if (freeSpaceInGB <= 0) {
+ freeSpaceInGB = 1;
+ }
+ weight = context.getScore() * (double) freeSpaceInGB;
+ }
+ weights[i] = weight;
+ if (weight > maxWeight){
+ maxWeight = weight;
+ }
+ if (weight < minWeight){
+ minWeight = weight;
+ }
+ }
+ double sub = maxWeight - minWeight;
+ i = i - 1;
+ for (; i >= 0; i--){
+ weights[i] = (sub > 0 ? (maxNormalizeWt - 1) * (weights[i] - minWeight) / sub : 0) + 1;
+ }
+ logger.info(StringUtils.join(weights, '|'));
+ }
+
+ @Test
+ public void startLogStorage() throws InterruptedException, StreamJobLogException {
+ BDPConfiguration.set("wds.stream.job.log.storage.context.paths", Objects.requireNonNull(JobLogStorage.class.getResource("/"))
+ .getPath());
+ JobLogStorage storage = createJobLogStorage();
+ storage.init();
+ JobLogBucket bucket = storage.getOrCreateBucket("hadoop", "test-app", new JobLogBucketConfig());
+ bucket.getBucketStorageWriter().write("Hello world");
+ Thread.sleep(1000);
+ storage.destroy();
+ }
+ private JobLogStorage createJobLogStorage(){
+ StreamisJobLogStorage jobLogStorage = new StreamisJobLogStorage();
+ jobLogStorage.addLoadBalancer(new RoundRobinLoadBalancer());
+ jobLogStorage.addLoadBalancer(new SimpleLoadBalancer());
+ jobLogStorage.setBucketDriftPolicy(new StorageThresholdDriftPolicy());
+ return jobLogStorage;
+ }
+}
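Note (not part of the patch): the min-max scaling inside calculateWeight, extracted as a standalone sketch; it maps raw weights onto [1, maxWt] and collapses to 1 everywhere when all weights are equal.

    static double[] normalizeWeights(double[] raw, int maxWt) {
        double max = Double.MIN_VALUE, min = Double.MAX_VALUE;
        for (double w : raw) {
            if (w > max) max = w;
            if (w < min) min = w;
        }
        double range = max - min;
        double[] out = new double[raw.length];
        for (int i = 0; i < raw.length; i++) {
            // fraction of the range scaled to (maxWt - 1), shifted so the floor is 1
            out[i] = (range > 0 ? (maxWt - 1) * (raw[i] - min) / range : 0) + 1;
        }
        return out;
    }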
diff --git a/streamis-jobmanager/streamis-job-manager/pom.xml b/streamis-jobmanager/streamis-job-manager/pom.xml
index 8c80a3520..748a289de 100755
--- a/streamis-jobmanager/streamis-job-manager/pom.xml
+++ b/streamis-jobmanager/streamis-job-manager/pom.xml
@@ -20,7 +20,7 @@
streamis-jobmanager
com.webank.wedatasphere.streamis
- 0.2.0
+ 0.3.0
4.0.0
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/pom.xml b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/pom.xml
index f0ac3a5c2..0dc9bf21e 100755
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/pom.xml
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/pom.xml
@@ -20,7 +20,7 @@
streamis-jobmanager
com.webank.wedatasphere.streamis
- 0.2.0
+ 0.3.0
../../pom.xml
4.0.0
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamFileMapper.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamFileMapper.java
new file mode 100644
index 000000000..c3561703f
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamFileMapper.java
@@ -0,0 +1,8 @@
+package com.webank.wedatasphere.streamis.jobmanager.manager.dao;
+
+/**
+ * Mapper for stream files (materials)
+ */
+public interface StreamFileMapper {
+
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamJobMapper.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamJobMapper.java
index c380ca954..b0d35c32d 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamJobMapper.java
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/StreamJobMapper.java
@@ -26,10 +26,13 @@
public interface StreamJobMapper {
List<QueryJobListVo> getJobLists(@Param("projectName") String projectName, @Param("userName") String userName, @Param("name") String name,
- @Param("status") Integer status, @Param("createBy") String createBy);
+ @Param("status") Integer status, @Param("createBy") String createBy, @Param("label") String label, @Param("manageModeKey") String manageModeKey, @Param("jobNameList") List jobNameList);
+
+ List getJobVersionDetails(@Param("jobId") Long jobId);
StreamJob getJobById(@Param("jobId") Long jobId);
+ List<StreamJob> getJobByName(@Param("jobName") String jobName);
List<StreamJobVersion> getJobVersions(@Param("jobId") Long jobId);
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamJobMapper.xml b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamJobMapper.xml
index 71e2b38a6..e9df41237 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamJobMapper.xml
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamJobMapper.xml
@@ -41,9 +41,12 @@
+
+
+
+
@@ -72,11 +76,11 @@
- `id`,`project_name`, `workspace_name`,`name`,`create_by`, `create_time`,`label`,`description`,`submit_user`,`job_type`
+ `id`,`project_name`, `workspace_name`,`name`,`create_by`, `create_time`,`label`,`description`,`submit_user`,`job_type`, `current_version`, `status`
- `id`,`job_id`,`version`,`source`,`job_content`,`comment`,`create_time`,`create_by`
+ `id`,`job_id`,`version`,`source`, `manage_mode`, `job_content`,`comment`,`create_time`,`create_by`
@@ -86,22 +90,28 @@
+
+ SELECT FROM
+ linkis_stream_job WHERE name = #{jobName}
+
+
- select * from (
+ select *, (SELECT jc.value FROM linkis_stream_job_config jc WHERE jc.job_id = aa.id and jc.key = #{manageModeKey} limit 1) AS manage_mode,
+ (SELECT COUNT(1) FROM linkis_stream_job_version v WHERE v.job_id = aa.id AND v.id > aa.version_id) AS version_forward,
+ (SELECT v.version FROM linkis_stream_job_version v WHERE v.job_id = aa.id ORDER BY v.id DESC LIMIT 1) AS last_version from (
SELECT j.`id`,j.`project_name`, j.`workspace_name`,j.`name`,j.`create_by`, j.`create_time`,j.`label`,j.`description`,
- j.`job_type`, (CASE WHEN t.`status` IS NULL THEN 0 ELSE t.status END) AS STATUS, ll.version AS VERSION,
- ll.create_time AS lastVersionTime, t.start_time, lsp.id AS project_id FROM
+ j.`job_type`, (CASE WHEN t.`status` IS NULL THEN 0 ELSE t.status END) AS status, j.`current_version` AS version, v.id as version_id,
+ v.create_time AS lastVersionTime, t.start_time, lsp.id AS project_id FROM
linkis_stream_job j
- LEFT JOIN (SELECT id,job_id, MAX(VERSION) VERSION FROM linkis_stream_job_version GROUP BY job_id) l ON j.id = l.job_id
- LEFT JOIN linkis_stream_job_version ll ON l.job_id=ll.job_id AND l.version=ll.version
- LEFT JOIN (SELECT MAX(start_time) start_time, MAX(VERSION) VERSION,job_id FROM linkis_stream_task GROUP BY job_id) tt
- ON tt.job_id = l.job_id AND tt.version = l.version
- LEFT JOIN linkis_stream_task t ON tt.job_id = t.job_id AND tt.start_time = t.start_time
+ LEFT JOIN linkis_stream_job_version v ON v.job_id = j.id AND v.version = j.current_version
+ LEFT JOIN (SELECT MAX(id) id, job_id, version FROM linkis_stream_task GROUP BY job_id,version) gt
+ ON gt.job_id = j.id AND gt.version = v.version
+ LEFT JOIN linkis_stream_task t ON t.id = gt.id
LEFT JOIN linkis_stream_project lsp ON lsp.name = j.project_name
) aa
@@ -111,14 +121,26 @@
AND aa.`name` like concat('%', #{name}, '%')
+
+ AND aa.`name` in
+
+ #{item}
+
+
AND aa.`status`=#{status}
AND aa.`create_by` = #{createBy}
+
+ AND binary aa.`label` like concat('%', #{label}, '%')
+
AND EXISTS (SELECT 1 FROM linkis_stream_project_privilege lspp WHERE lspp.project_id =aa.project_id
- AND lspp.privilege in (1,2,3) AND lspp.user_name = #{userName})
+
+ AND lspp.user_name = #{userName}
+
+ AND lspp.privilege in (1,2,3))
order by aa.start_time desc
@@ -130,7 +152,7 @@
- SELECT * FROM linkis_stream_job_version WHERE job_id=#{jobId} ORDER BY version desc LIMIT 1
+ SELECT * FROM linkis_stream_job_version WHERE job_id=#{jobId} ORDER BY id desc,version desc LIMIT 1
@@ -150,19 +172,24 @@
WHERE v.job_id = j.id AND j.id = #{jobId} AND v.version=#{version}
+
+ SELECT j.id,v.version,j.description,DATE_FORMAT(v.create_time,"%Y-%m-%d %H:%i:%s") AS releaseTime,j.create_by AS createBy,j.project_name
+ FROM `linkis_stream_job` j INNER JOIN linkis_stream_job_version v
+ ON j.id = #{jobId} AND v.job_id = j.id order by v.version desc
+
SELECT
- j.`id`,j.`project_name`,j.`name`,j.`job_type`,j.`label`,j.`description`,j.`submit_user`,j.`submit_user` as create_by,l.version current_version
- FROM `linkis_stream_job` j LEFT JOIN `linkis_stream_job_version` l ON j.`id`=l.job_id AND j.project_name = #{projectName}
+ j.`id`,j.`project_name`,j.`name`,j.`job_type`,j.`label`,j.`description`,j.`submit_user`,j.`submit_user` as create_by, j.`current_version`
+ FROM `linkis_stream_job` j LEFT JOIN `linkis_stream_job_version` l ON j.`id`= l.job_id AND j.project_name = #{projectName}
SELECT
- j.`id`,j.`project_name`,j.`name`,j.`job_type`,j.`label`,j.`description`,j.`submit_user`,l.version current_version
- FROM `linkis_stream_job` j JOIN `linkis_stream_job_version` l ON j.`id`=l.job_id AND j.project_name = #{projectName}
+ j.`id`,j.`project_name`,j.`name`,j.`job_type`,j.`label`,j.`description`,j.`submit_user`,j.`current_version`
+ FROM `linkis_stream_job` j JOIN `linkis_stream_job_version` l ON j.`id`=l.job_id AND j.project_name = #{projectName} AND j.`current_version` = l.version
AND j.name = #{jobName}
- ORDER BY l.create_time DESC LIMIT 1
- INSERT INTO linkis_stream_job(`project_name`,`name`,`create_by`,`label`,`description`,`job_type`,`submit_user`,`create_time`)
- VALUES(#{projectName},#{name},#{createBy},#{label},#{description},#{jobType},#{submitUser},#{createTime})
+ INSERT INTO linkis_stream_job(`project_name`,`name`,`create_by`,`label`,`description`,`job_type`,`submit_user`, `current_version`, `create_time`)
+ VALUES(#{projectName},#{name},#{createBy},#{label},#{description},#{jobType},#{submitUser},#{currentVersion, jdbcType=VARCHAR},#{createTime})
description=#{description},
+
+ current_version=#{currentVersion}
+
WHERE id=#{id}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamTaskMapper.xml b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamTaskMapper.xml
index 6d2a92b78..c3c39bc06 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamTaskMapper.xml
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/dao/impl/StreamTaskMapper.xml
@@ -126,14 +126,14 @@
- SELECT `id`,`job_version_id`,`job_id`, status
+ SELECT `id`,`job_version_id`,`job_id`,`version`, status
,`start_time`,`last_update_time`,
`err_desc`,`submit_user`, `linkis_job_id`, `linkis_job_info`
FROM linkis_stream_task where `job_id`=#{jobId} ORDER BY start_time DESC, id DESC LIMIT 1
- SELECT `id`,`job_version_id`,`job_id`, status
+ SELECT `id`,`job_version_id`,`job_id`,`version`, status
,`start_time`,`last_update_time`,
`err_desc`,`submit_user`, `linkis_job_id`, `linkis_job_info`
FROM linkis_stream_task where `job_id`=#{jobId} AND linkis_job_id IS NOT NULL ORDER BY start_time DESC, id DESC LIMIT 1
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/MetaJsonInfo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/MetaJsonInfo.java
index 5259c1c60..7c4d7b743 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/MetaJsonInfo.java
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/MetaJsonInfo.java
@@ -56,8 +56,12 @@ public class MetaJsonInfo {
private String description;
- private Map jobContent;
+ private Map jobContent;
+ /**
+ * Job configuration
+ */
+ private Map<String, Object> jobConfig;
private String metaInfo;
public String getMetaInfo() {
@@ -131,4 +135,12 @@ public String getComment() {
public void setComment(String comment) {
this.comment = comment;
}
+
+ public Map<String, Object> getJobConfig() {
+ return jobConfig;
+ }
+
+ public void setJobConfig(Map<String, Object> jobConfig) {
+ this.jobConfig = jobConfig;
+ }
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJob.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJob.java
index 39f09dee4..19505be37 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJob.java
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJob.java
@@ -29,6 +29,12 @@ public class StreamJob {
private String description;
private String submitUser;
private Date createTime;
+ /**
+ * Current version in use
+ */
+ private String currentVersion;
+
+ private int status;
public Long getWorkspaceName() {
return workspaceName;
@@ -62,7 +68,6 @@ public void setId(Long id) {
this.id = id;
}
-
public String getName() {
return name;
}
@@ -111,4 +116,41 @@ public void setJobType(String jobType) {
this.jobType = jobType;
}
+ public String getCurrentVersion() {
+ return currentVersion;
+ }
+
+ public void setCurrentVersion(String currentVersion) {
+ this.currentVersion = currentVersion;
+ }
+
+ public StreamJob() {
+ // no-args constructor
+ }
+
+ public int getStatus() {
+ return status;
+ }
+
+ public void setStatus(int status) {
+ this.status = status;
+ }
+
+ @Override
+ public String toString() {
+ return "StreamJob{" +
+ "id=" + id +
+ ", workspaceName=" + workspaceName +
+ ", name='" + name + '\'' +
+ ", projectName='" + projectName + '\'' +
+ ", jobType='" + jobType + '\'' +
+ ", createBy='" + createBy + '\'' +
+ ", label='" + label + '\'' +
+ ", description='" + description + '\'' +
+ ", submitUser='" + submitUser + '\'' +
+ ", createTime=" + createTime +
+ ", currentVersion='" + currentVersion + '\'' +
+ ", status=" + status +
+ '}';
+ }
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJobMode.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJobMode.java
new file mode 100644
index 000000000..4b55db6c4
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJobMode.java
@@ -0,0 +1,32 @@
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity;
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.enums.JobClientType;
+
+
+/**
+ * Stream job mode
+ */
+public enum StreamJobMode {
+ /**
+ * Engine Conn mode
+ */
+ ENGINE_CONN(JobClientType.ATTACH.getName()),
+
+ /**
+ * Client mode
+ */
+ CLIENT(JobClientType.DETACH.getName());
+
+ /**
+ * Associated client type name
+ */
+ private final String clientType;
+
+ StreamJobMode(String clientType){
+ this.clientType = clientType;
+ }
+
+ public String getClientType(){
+ return this.clientType;
+ }
+}
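Note (not part of the patch): a hypothetical helper for resolving the enum from a stored manage-mode string; the ENGINE_CONN fallback mirrors the default JobDetailsVo uses below.

    static StreamJobMode fromClientType(String clientType) {
        for (StreamJobMode mode : StreamJobMode.values()) {
            if (mode.getClientType().equalsIgnoreCase(clientType)) {
                return mode;
            }
        }
        return StreamJobMode.ENGINE_CONN;  // default manage mode
    }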
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJobVersion.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJobVersion.java
index 5e08bc78f..fe354628a 100755
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJobVersion.java
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJobVersion.java
@@ -23,6 +23,10 @@ public class StreamJobVersion {
private Long jobId;
private String version;
private String source;
+ /**
+ * Manage mode
+ */
+ private String manageMode;
private String jobContent;
private String comment;
private Date createTime;
@@ -91,4 +95,27 @@ public String getComment() {
public void setComment(String comment) {
this.comment = comment;
}
+
+ public String getManageMode() {
+ return manageMode;
+ }
+
+ public void setManageMode(String manageMode) {
+ this.manageMode = manageMode;
+ }
+
+ @Override
+ public String toString() {
+ return "StreamJobVersion{" +
+ "id=" + id +
+ ", jobId=" + jobId +
+ ", version='" + version + '\'' +
+ ", source='" + source + '\'' +
+ ", manageMode='" + manageMode + '\'' +
+ ", jobContent='" + jobContent + '\'' +
+ ", comment='" + comment + '\'' +
+ ", createTime=" + createTime +
+ ", createBy='" + createBy + '\'' +
+ '}';
+ }
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJobVersionFiles.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJobVersionFiles.java
index 3f0f83187..cd52691a7 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJobVersionFiles.java
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamJobVersionFiles.java
@@ -1,6 +1,6 @@
/*
* Copyright 2021 WeBank
- * Licensed under the Apache License, Version 2.0 (the "License");
+ * Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamTask.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamTask.java
index ac4064e3c..03836157f 100755
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamTask.java
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/StreamTask.java
@@ -23,6 +23,7 @@ public class StreamTask {
private Long id;
private Long jobVersionId;
private Long jobId;
+ private String jobType;
private String submitUser;
private Date startTime;
private Date lastUpdateTime;
@@ -132,4 +133,30 @@ public String getLinkisJobInfo() {
public void setLinkisJobInfo(String linkisJobInfo) {
this.linkisJobInfo = linkisJobInfo;
}
+
+ public String getJobType() {
+ return jobType;
+ }
+
+ public void setJobType(String jobType) {
+ this.jobType = jobType;
+ }
+
+ @Override
+ public String toString() {
+ return "StreamTask{" +
+ "id=" + id +
+ ", jobVersionId=" + jobVersionId +
+ ", jobId=" + jobId +
+ ", jobType='" + jobType + '\'' +
+ ", submitUser='" + submitUser + '\'' +
+ ", startTime=" + startTime +
+ ", lastUpdateTime=" + lastUpdateTime +
+ ", linkisJobId='" + linkisJobId + '\'' +
+ ", linkisJobInfo='" + linkisJobInfo + '\'' +
+ ", errDesc='" + errDesc + '\'' +
+ ", version='" + version + '\'' +
+ ", status=" + status +
+ '}';
+ }
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/ExecResultVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/ExecResultVo.java
index 1e101a8a4..375594144 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/ExecResultVo.java
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/ExecResultVo.java
@@ -1,6 +1,6 @@
/*
* Copyright 2021 WeBank
- * Licensed under the Apache License, Version 2.0 (the "License");
+ * Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobDetailsVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobDetailsVo.java
index f01f301f3..5ca58a1f4 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobDetailsVo.java
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobDetailsVo.java
@@ -15,7 +15,8 @@
package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo;
-import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.LinkisJobInfo;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.jobInfo.LinkisJobInfo;
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJobMode;
import java.util.List;
@@ -25,6 +26,10 @@ public class JobDetailsVo {
private List dataNumber;
private List loadCondition;
private LinkisJobInfo linkisJobInfo;
+ /**
+ * Manage mode
+ */
+ private String manageMode = StreamJobMode.ENGINE_CONN.getClientType();
public LinkisJobInfo getLinkisJobInfo() {
return linkisJobInfo;
@@ -58,6 +63,14 @@ public void setLoadCondition(List loadCondition) {
this.loadCondition = loadCondition;
}
+ public String getManageMode() {
+ return manageMode;
+ }
+
+ public void setManageMode(String manageMode) {
+ this.manageMode = manageMode;
+ }
+
public static class RealTimeTrafficDTO {
private String sourceKey;
private String sourceSpeed;
@@ -202,4 +215,5 @@ public void setTotalMemory(String totalMemory) {
this.totalMemory = totalMemory;
}
}
+
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobInspectVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobInspectVo.java
new file mode 100644
index 000000000..0998277a5
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobInspectVo.java
@@ -0,0 +1,19 @@
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo;
+
+import com.fasterxml.jackson.annotation.JsonIgnore;
+import com.fasterxml.jackson.annotation.JsonInclude;
+
+/**
+ * Job inspect vo
+ */
+@JsonInclude(JsonInclude.Include.NON_EMPTY)
+public interface JobInspectVo {
+
+ enum Types{
+ VERSION, SNAPSHOT, STATUS, LIST
+ }
+ @JsonIgnore
+ String getInspectName();
+
+
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobListInspectVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobListInspectVo.java
new file mode 100644
index 000000000..700f435c5
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobListInspectVo.java
@@ -0,0 +1,39 @@
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo;
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.entity.vo.YarnAppVo;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Locale;
+
+public class JobListInspectVo implements JobInspectVo {
+
+ private List<YarnAppVo> list;
+
+ @Override
+ public String getInspectName() {
+ return Types.LIST.name().toLowerCase(Locale.ROOT);
+ }
+
+ public List<YarnAppVo> getList() {
+ return list;
+ }
+
+ public void setList(List<YarnAppVo> list) {
+ this.list = list;
+ }
+
+ public void addOneUrl(String appId, String url, String state) {
+ if (null == list) {
+ list = new ArrayList<>();
+ }
+ list.add(new YarnAppVo(appId, url, state));
+ }
+
+ public void addYarnApp(YarnAppVo app) {
+ if (null == list) {
+ list = new ArrayList<>();
+ }
+ list.add(app);
+ }
+}
\ No newline at end of file
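Note (not part of the patch): with NON_EMPTY inherited from JobInspectVo and getInspectName() marked @JsonIgnore, an empty vo should serialize to {}. A minimal check, assuming Jackson's com.fasterxml.jackson.databind.ObjectMapper is on the classpath; the app id and URL are made up.

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        JobListInspectVo vo = new JobListInspectVo();
        System.out.println(mapper.writeValueAsString(vo));  // {}
        vo.addOneUrl("application_1690000000000_0001",
                "http://rm:8088/cluster/app/application_1690000000000_0001", "RUNNING");
        System.out.println(mapper.writeValueAsString(vo));  // {"list":[{...}]}
    }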
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobSnapshotInspectVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobSnapshotInspectVo.java
new file mode 100644
index 000000000..59f89b0c1
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobSnapshotInspectVo.java
@@ -0,0 +1,23 @@
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo;
+
+import java.util.Locale;
+
+public class JobSnapshotInspectVo implements JobInspectVo{
+ /**
+ * Path
+ */
+ private String path;
+
+ @Override
+ public String getInspectName() {
+ return Types.SNAPSHOT.name().toLowerCase(Locale.ROOT);
+ }
+
+ public String getPath() {
+ return path;
+ }
+
+ public void setPath(String path) {
+ this.path = path;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobVersionInspectVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobVersionInspectVo.java
new file mode 100644
index 000000000..2f06703dc
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/JobVersionInspectVo.java
@@ -0,0 +1,43 @@
+package com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo;
+
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJobVersion;
+
+import java.util.Locale;
+
+/**
+ * Version inspect
+ */
+public class JobVersionInspectVo implements JobInspectVo{
+
+ /**
+ * Current version
+ */
+ private StreamJobVersion now;
+
+ /**
+ * Last version
+ */
+ private StreamJobVersion last;
+
+ @Override
+ public String getInspectName() {
+ return Types.VERSION.name().toLowerCase(Locale.ROOT);
+ }
+
+
+ public StreamJobVersion getNow() {
+ return now;
+ }
+
+ public void setNow(StreamJobVersion now) {
+ this.now = now;
+ }
+
+ public StreamJobVersion getLast() {
+ return last;
+ }
+
+ public void setLast(StreamJobVersion last) {
+ this.last = last;
+ }
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/PauseResultVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/PauseResultVo.java
index ac41b5d0b..873fee724 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/PauseResultVo.java
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/PauseResultVo.java
@@ -1,6 +1,6 @@
/*
* Copyright 2021 WeBank
- * Licensed under the Apache License, Version 2.0 (the "License");
+ * Licensed under the Apache License, Version 2.0 (the "License")
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/QueryJobListVo.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/QueryJobListVo.java
index 4a893f826..b8326080d 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/QueryJobListVo.java
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/entity/vo/QueryJobListVo.java
@@ -24,12 +24,21 @@ public class QueryJobListVo {
private String projectId;
private String projectName;
private String jobType;
+ private String manageMode;
private String label;
private String createBy;
private Date createTime;
private Integer status;
private String version;
+ /**
+ * Last version
+ */
+ private String lastVersion;
private Date lastVersionTime;
+ /**
+ * Number of versions ahead of the current one
+ */
+ private Integer versionForwards;
private String description;
public Long getId() {
@@ -80,6 +89,14 @@ public void setJobType(String jobType) {
this.jobType = jobType;
}
+ public String getManageMode() {
+ return manageMode;
+ }
+
+ public void setManageMode(String manageMode) {
+ this.manageMode = manageMode;
+ }
+
public String getLabel() {
return label;
}
@@ -135,4 +152,20 @@ public String getDescription() {
public void setDescription(String description) {
this.description = description;
}
+
+ public Integer getVersionForwards() {
+ return versionForwards;
+ }
+
+ public void setVersionForwards(Integer versionForwards) {
+ this.versionForwards = versionForwards;
+ }
+
+ public String getLastVersion() {
+ return lastVersion;
+ }
+
+ public void setLastVersion(String lastVersion) {
+ this.lastVersion = lastVersion;
+ }
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/exception/FileExceptionManager.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/exception/FileExceptionManager.java
index 22153bb25..b1eeb802c 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/exception/FileExceptionManager.java
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/exception/FileExceptionManager.java
@@ -20,8 +20,11 @@
import java.util.Map;
public class FileExceptionManager {
+
+ private FileExceptionManager(){}
+
//30600-30700
- private static Map<String, String> desc = new HashMap(32);
+ private static Map<String, String> desc = new HashMap<>(32);
static {
desc.put("30600", "%s length exceeds limit(长度超出限制),Please limit input within %d characters");
desc.put("30601", "%s should only contains numeric/English characters and '-'(仅允许包含数字,英文和中划线)");
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/util/CookieUtils.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/util/CookieUtils.java
index 9cd05e9e2..a3349aeb3 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/util/CookieUtils.java
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/util/CookieUtils.java
@@ -22,13 +22,15 @@ public class CookieUtils {
private static final String COOKIE_WORKSPACE_ID = "workspaceId";
+ private CookieUtils(){}
+
static String getCookieValue(HttpServletRequest request, String name) {
Cookie c = getCookie(request, name);
return c == null ? null : c.getValue();
}
static Cookie getCookie(HttpServletRequest request, String name) {
- Cookie cookies[] = request.getCookies();
+ Cookie[] cookies = request.getCookies();
if (cookies != null) {
for (Cookie cookie : cookies) {
if (name.equals(cookie.getName())) {
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/util/IoUtils.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/util/IoUtils.java
index 9324f0f50..c92e28ffd 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/util/IoUtils.java
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/util/IoUtils.java
@@ -28,15 +28,17 @@
public class IoUtils {
private static Logger logger = LoggerFactory.getLogger(IoUtils.class);
- private static final String dateFormat_day = "yyyyMMdd";
- private static final String dateFormat_time = "HHmmss";
- private static final String IOUrl = CommonVars.apply("wds.streamis.zip.dir", "/tmp").getValue();
+ private static final String dateFormatDay = "yyyyMMdd";
+ private static final String dateFormatTime = "HHmmss";
+ private static final String ioUrl = CommonVars.apply("wds.streamis.zip.dir", "/tmp").getValue();
+
+ private IoUtils(){}
public static String generateIOPath(String userName, String projectName, String subDir) {
- String baseIOUrl = IOUrl;
+ String baseIOUrl = ioUrl;
String file = subDir.substring(0,subDir.lastIndexOf("."));
- String dayStr = new SimpleDateFormat(dateFormat_day).format(new Date());
- String timeStr = new SimpleDateFormat(dateFormat_time).format(new Date());
+ String dayStr = new SimpleDateFormat(dateFormatDay).format(new Date());
+ String timeStr = new SimpleDateFormat(dateFormatTime).format(new Date());
return addFileSeparator(baseIOUrl, projectName, dayStr, userName, file + "_" + timeStr, subDir);
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/util/JobUtils.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/util/JobUtils.scala
new file mode 100644
index 000000000..852e16924
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/util/JobUtils.scala
@@ -0,0 +1,19 @@
+package com.webank.wedatasphere.streamis.jobmanager.manager.util
+
+import org.apache.commons.lang3.StringUtils
+
+object JobUtils {
+
+ def escapeChar(string: String): String = {
+ var str = string
+ if (StringUtils.isNotBlank(string)) {
+ str = string.replace("\\\\", "\\\\\\\\")
+ .replace("_", "\\_")
+ .replace("\'", "\\'")
+ .replace("%", "\\%")
+ .replace("*", "\\*")
+ }
+ str
+ }
+
+}
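Note (not part of the patch): escapeChar guards user input before it reaches the LIKE patterns built in StreamJobMapper.xml (concat('%', #{name}, '%')), assuming MySQL's default backslash escape. Scala objects expose static forwarders, so it is callable from Java:

    String safe = JobUtils.escapeChar("job_name%test");
    // safe is "job\\_name\\%test" as a Java literal, i.e. job\_name\%test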
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/util/ReaderUtils.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/util/ReaderUtils.java
index b36b16601..dd295b4df 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/util/ReaderUtils.java
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/util/ReaderUtils.java
@@ -58,6 +58,7 @@ public PublishRequestVo parseFile(String dirPath) throws IOException, FileExcept
try (InputStream inputStream = generateInputStream(basePath)) {
return read(inputStream);
} catch (Exception e) {
+ LOG.error(e.getMessage());
throw e;
}
}
@@ -134,12 +135,27 @@ private MetaJsonInfo readJson(BufferedReader reader) throws IOException, FileExc
public MetaJsonInfo parseJson(String dirPath) throws IOException, FileException {
getBasePath(dirPath);
- try (InputStream inputStream = generateInputStream(basePath);
- InputStreamReader streamReader = new InputStreamReader(inputStream);
- BufferedReader reader = new BufferedReader(streamReader);) {
+ InputStream inputStream = null;
+ InputStreamReader streamReader = null;
+ try {
+ inputStream = generateInputStream(basePath);
+ streamReader = new InputStreamReader(inputStream);
+ BufferedReader reader = new BufferedReader(streamReader);
return readJson(reader);
} catch (Exception e) {
+ LOG.error(e.getMessage());
throw e;
+ } finally {
+ try {
+ if (null != inputStream) {
+ inputStream.close();
+ }
+ if (null != streamReader) {
+ streamReader.close();
+ }
+ } catch (Exception e1) {
+ LOG.warn("close stream error, {}", e1.getMessage());
+ }
}
}
@@ -156,6 +172,7 @@ private PublishRequestVo read(InputStream inputStream) throws IOException, FileE
BufferedReader reader = new BufferedReader(streamReader);) {
return readFile(reader);
} catch (Exception e) {
+ LOG.error(e.getMessage());
throw e;
}
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/util/ZipHelper.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/util/ZipHelper.java
index 75439b988..914edce53 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/util/ZipHelper.java
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-base/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/util/ZipHelper.java
@@ -29,12 +29,12 @@
public class ZipHelper {
private static final Logger logger = LoggerFactory.getLogger(ZipHelper.class);
- private static final String ZIP_CMD = "zip";
private static final String UN_ZIP_CMD = "unzip";
- private static final String RECURSIVE = "-r";
private static final String ZIP_TYPE = ".zip";
- public static String unzip(String dirPath)throws Exception { //"D:\\tmp\\streamis\\20210922\\johnnwang\\ab_175950\\ab.zip"
+ private ZipHelper(){}
+
+ public static String unzip(String dirPath)throws Exception {
File file = new File(dirPath);
if(!file.exists()){
logger.error("{} does not exist, can not unzip", dirPath);
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/pom.xml b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/pom.xml
index aec49b65b..d2de5d4dc 100755
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/pom.xml
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/pom.xml
@@ -20,7 +20,7 @@
streamis-jobmanager
com.webank.wedatasphere.streamis
- 0.2.0
+ 0.3.0
../../pom.xml
4.0.0
@@ -31,9 +31,17 @@
8
8
+ 4.12
+
+
+ junit
+ junit
+ ${junit.version}
+ test
+
com.webank.wedatasphere.streamis
streamis-job-manager-base
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/JobManagerAutoConfiguration.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/JobManagerAutoConfiguration.java
index 98f0267c9..391490cd0 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/JobManagerAutoConfiguration.java
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/JobManagerAutoConfiguration.java
@@ -19,7 +19,6 @@
import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.StreamisScheduler;
import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.StreamisSchedulerExecutorManager;
import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.TenancyConsumerManager;
-import org.apache.linkis.scheduler.Scheduler;
import org.apache.linkis.scheduler.executer.ExecutorManager;
import org.apache.linkis.scheduler.queue.ConsumerManager;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/alert/AlertConf.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/alert/AlertConf.java
index e0361e7c4..a5fcfe71e 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/alert/AlertConf.java
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/alert/AlertConf.java
@@ -21,13 +21,14 @@
import org.apache.linkis.common.conf.CommonVars;
-public interface AlertConf {
+public class AlertConf {
- CommonVars<String> ALERT_IP = CommonVars.apply("wds.streamis.alert.streamis.ip", "127.0.0.1");
+ private AlertConf(){}
+ public static final CommonVars<String> ALERT_IP = CommonVars.apply("wds.streamis.alert.streamis.ip", "127.0.0.1");
- CommonVars<String> ALERT_SUB_SYS_ID = CommonVars.apply("wds.streamis.alert.streamis.systemid", "7495");
+ public static final CommonVars<String> ALERT_SUB_SYS_ID = CommonVars.apply("wds.streamis.alert.streamis.systemid", "7495");
- Gson COMMON_GSON = new GsonBuilder().setDateFormat("yyyy-MM-dd'T'HH:mm:ssZ").create();
+ public static final Gson COMMON_GSON = new GsonBuilder().setDateFormat("yyyy-MM-dd'T'HH:mm:ssZ").create();
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/alert/DefaultAlerter.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/alert/DefaultAlerter.java
index 85f820417..516ff9ee7 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/alert/DefaultAlerter.java
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/alert/DefaultAlerter.java
@@ -17,7 +17,6 @@
import com.webank.wedatasphere.streamis.jobmanager.manager.dao.StreamAlertMapper;
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamAlertRecord;
-import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob;
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamTask;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/project/service/impl/ProjectPrivilegeServiceImpl.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/project/service/impl/ProjectPrivilegeServiceImpl.java
index 21e5d50ca..67b6d556f 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/project/service/impl/ProjectPrivilegeServiceImpl.java
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/project/service/impl/ProjectPrivilegeServiceImpl.java
@@ -26,7 +26,7 @@ public class ProjectPrivilegeServiceImpl implements ProjectPrivilegeService {
@Autowired
RestTemplate restTemplate;
- private String url_prefix = Configuration.getGateWayURL()+ ServerConfiguration.BDP_SERVER_RESTFUL_URI().getValue()+ "/streamis/project/projectPrivilege";
+ private String urlPrefix = Configuration.getGateWayURL()+ ServerConfiguration.BDP_SERVER_RESTFUL_URI().getValue()+ "/streamis/project/projectPrivilege";
@Override
public Boolean hasReleasePrivilege(HttpServletRequest req, String projectName) {
@@ -71,7 +71,7 @@ public Boolean hasAccessPrivilege(HttpServletRequest req, List projectNa
}
private Map getResponseData(String reqPath, HttpServletRequest req){
- String url = url_prefix + reqPath;
+ String url = urlPrefix + reqPath;
HttpHeaders headers = new HttpHeaders();
headers.add("Cookie",req.getHeader("Cookie"));
headers.setContentType(MediaType.APPLICATION_JSON);
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/StreamisScheduler.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/StreamisScheduler.java
index feadc2bab..5de042b37 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/StreamisScheduler.java
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/StreamisScheduler.java
@@ -44,6 +44,7 @@ public class StreamisScheduler extends AbstractScheduler implements FutureSchedu
public static class Constraints{
+ private Constraints (){}
private static final CommonVars<String> TENANCY_PATTERN = CommonVars.apply("wds.streamis.job.scheduler.consumer.tenancies", "hadoop");
private static final CommonVars<Integer> GROUP_INIT_CAPACITY = CommonVars.apply("wds.streamis.job.scheduler.group.min.capacity", 1000);
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/StreamisSchedulerExecutorManager.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/StreamisSchedulerExecutorManager.java
index 602d0d82f..dbd1eda5b 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/StreamisSchedulerExecutorManager.java
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/StreamisSchedulerExecutorManager.java
@@ -57,12 +57,12 @@ public Executor[] getByGroup(String groupName) {
@Override
public void delete(Executor executor) {
-
+ //nothing
}
@Override
public void shutdown() {
-
+ //nothing
}
private Executor getOrCreateExecutor(){
@@ -114,7 +114,7 @@ public ExecutorInfo getExecutorInfo() {
@Override
public void close() throws IOException {
-
+ //nothing
}
}
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/TenancyConsumerManager.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/TenancyConsumerManager.java
index 896d1d465..dd0fea507 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/TenancyConsumerManager.java
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/TenancyConsumerManager.java
@@ -134,11 +134,11 @@ protected ExecutorService getOrCreateExecutorService(String groupName){
String tenancy = tenancyGroupFactory.getTenancyByGroupName(groupName);
groupFactory.getGroup(groupName);
if (StringUtils.isNotBlank(tenancy)){
- return tenancyExecutorServices.computeIfAbsent(tenancy, tenancyName -> {
+ return tenancyExecutorServices.computeIfAbsent(tenancy, tenancyName ->
// Use the default value of max running jobs
- return Utils.newCachedThreadPool(tenancyGroupFactory.getDefaultMaxRunningJobs() + 1,
- TenancyGroupFactory.GROUP_NAME_PREFIX + tenancy + "-Executor-", true);
- });
+ Utils.newCachedThreadPool(tenancyGroupFactory.getDefaultMaxRunningJobs() + 1,
+ TenancyGroupFactory.GROUP_NAME_PREFIX + tenancy + "-Executor-", true)
+ );
}
}
return getOrCreateExecutorService();
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/events/AbstractStreamisSchedulerEvent.java b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/events/AbstractStreamisSchedulerEvent.java
index 383dba8b6..38dfe6a97 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/events/AbstractStreamisSchedulerEvent.java
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/java/com/webank/wedatasphere/streamis/jobmanager/manager/scheduler/events/AbstractStreamisSchedulerEvent.java
@@ -15,7 +15,6 @@
package com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.events;
-import com.ctc.wstx.util.StringUtil;
import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.StreamisSchedulerEvent;
import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.exception.StreamisScheduleException;
import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.exception.StreamisScheduleRetryException;
@@ -73,7 +72,7 @@ public abstract class AbstractStreamisSchedulerEvent extends Job implements Stre
*/
protected Map resultSet = new HashMap<>();
- public AbstractStreamisSchedulerEvent(){
+ protected AbstractStreamisSchedulerEvent(){
setJobListener(new JobListener() {
@Override
public void onJobInited(Job job) {
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/material/StreamFileContainer.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/material/StreamFileContainer.scala
new file mode 100644
index 000000000..98d974f8c
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/material/StreamFileContainer.scala
@@ -0,0 +1,36 @@
+package com.webank.wedatasphere.streamis.jobmanager.manager.material
+
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamisFile
+import java.util
+/**
+ * Define the stream file container
+ */
+trait StreamFileContainer {
+ /**
+ * Container name
+ * @return
+ */
+ def getContainerName: String
+
+ /**
+ * Get stream files
+ * @return
+ */
+ def getStreamFiles: util.List[StreamisFile]
+
+ /**
+ * Get stream files by match function
+ * @param matchFunc match function
+ * @return
+ */
+ def getStreamFiles(matchFunc: StreamisFile => Boolean): util.List[StreamisFile]
+
+ /**
+ * Get stream file by basename, model name and suffix
+ * @param name name
+ * @param model model
+ * @param suffix suffix
+ * @return
+ */
+ def getStreamFile(name: String, model: String, suffix: String): StreamisFile
+}
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/material/StreamFileLocalContainer.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/material/StreamFileLocalContainer.scala
new file mode 100644
index 000000000..97657c571
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/material/StreamFileLocalContainer.scala
@@ -0,0 +1,12 @@
+package com.webank.wedatasphere.streamis.jobmanager.manager.material
+
+import org.apache.linkis.common.conf.CommonVars
+
+trait StreamFileLocalContainer extends StreamFileContainer {
+
+}
+
+object StreamFileLocalContainer{
+
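+  // Config key for the local directory used to store stream material files; the value defaults to "material"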
+ val STORE_PATH: CommonVars[String] = CommonVars("wds.streamis.job.material.container.local.store-path", "material")
+}
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/BMLService.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/BMLService.scala
index b804088e6..c19b90811 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/BMLService.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/BMLService.scala
@@ -15,6 +15,7 @@
package com.webank.wedatasphere.streamis.jobmanager.manager.service
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.exception.JobCreateErrorException
import java.io.{ByteArrayInputStream, File, FileInputStream, InputStream}
import java.util
@@ -22,7 +23,6 @@ import org.apache.linkis.bml.client.{BmlClient, BmlClientFactory}
import org.apache.linkis.bml.protocol.{BmlUpdateResponse, BmlUploadResponse}
import org.apache.linkis.common.exception.ErrorException
import org.apache.linkis.common.utils.{Logging, Utils}
-import com.webank.wedatasphere.streamis.jobmanager.manager.exception.JobCreateErrorException
import javax.annotation.PreDestroy
import org.apache.commons.lang.StringUtils
import org.springframework.stereotype.Component
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/DefaultStreamJobInspectService.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/DefaultStreamJobInspectService.scala
new file mode 100644
index 000000000..49fda7aa6
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/DefaultStreamJobInspectService.scala
@@ -0,0 +1,160 @@
+package com.webank.wedatasphere.streamis.jobmanager.manager.service
+import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConstants
+import com.webank.wedatasphere.streamis.jobmanager.launcher.dao.StreamJobConfMapper
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.constants.JobConstants
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.errorcode.JobLaunchErrorCode
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.exception.{JobCreateErrorException, JobErrorException, JobFetchErrorException}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.client.LinkisFlinkManagerJobClient
+import com.webank.wedatasphere.streamis.jobmanager.manager.dao.{StreamJobMapper, StreamTaskMapper}
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{StreamJob, StreamJobVersion}
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.{JobInspectVo, JobListInspectVo, JobSnapshotInspectVo, JobVersionInspectVo}
+import org.apache.commons.lang3.StringUtils
+import org.apache.linkis.common.exception.ErrorException
+import org.apache.linkis.common.utils.{JsonUtils, Logging, Utils}
+import org.springframework.beans.factory.annotation.Autowired
+import org.springframework.stereotype.Service
+import org.springframework.transaction.annotation.Transactional
+
+import java.net.URI
+import java.util
+import scala.collection.JavaConverters.asScalaBufferConverter
+
+@Service
+class DefaultStreamJobInspectService extends StreamJobInspectService with Logging{
+
+ @Autowired
+ private var streamTaskService: StreamTaskService = _
+
+ @Autowired
+ private var streamJobMapper: StreamJobMapper = _
+
+ @Autowired
+ private var streamTaskMapper: StreamTaskMapper = _
+
+ @Autowired
+ private var streamJobConfMapper: StreamJobConfMapper = _
+ /**
+ * Inspect method
+ *
+ * @param jobId job id
+ * @param types type list for inspecting
+ * @return
+ */
+ @throws(classOf[ErrorException])
+ @Transactional(rollbackFor = Array(classOf[Exception]))
+ override def inspect(jobId: Long, types: Array[JobInspectVo.Types]): util.List[JobInspectVo] = {
+ val inspectVos: util.List[JobInspectVo] = new util.ArrayList[JobInspectVo]
+ // Lock the stream job
+ Option(this.streamJobMapper.queryAndLockJobById(jobId)) match {
+ case Some(streamJob) =>
+ types.foreach {
+ case JobInspectVo.Types.VERSION =>
+ Option(versionInspect(streamJob)).foreach(inspectVos.add(_))
+ case JobInspectVo.Types.SNAPSHOT =>
+ Option(snapshotInspect(streamJob)).foreach(inspectVos.add(_))
+ case JobInspectVo.Types.LIST =>
+ Option(listInspect(streamJob)).foreach(inspectVos.add(_))
+ case _ => null
+ // Do nothing
+ }
+ case _ => //Ignore
+ }
+ inspectVos
+ }
+
+ /**
+ * Inspect the job version
+ * @param streamJob stream job
+ * @return
+ */
+ private def versionInspect(streamJob: StreamJob): JobVersionInspectVo = {
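+    // "now" holds the latest deployed version; "last" holds the version used by the most recent task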
+ val inspectVo = new JobVersionInspectVo
+ val latestJobVersion = streamJobMapper.getLatestJobVersion(streamJob.getId)
+ inspectVo.setNow(latestJobVersion)
+ Option(streamTaskMapper.getLatestByJobId(streamJob.getId)) match {
+ case Some(task) =>
+ val lastJobVersion = streamJobMapper.getJobVersionById(streamJob.getId, task.getVersion)
+ inspectVo.setLast(lastJobVersion)
+ case _ =>
+ }
+ inspectVo
+ }
+
+ /**
+ * Inspect the snapshot
+ * @param streamJob stream job
+ * @return
+ */
+ private def snapshotInspect(streamJob: StreamJob): JobSnapshotInspectVo = {
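+    // Prefer a user-configured savepoint path; otherwise, when auto-restore is ON, fall back to the state of the latest launched task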
+ Option(this.streamJobConfMapper.getRawConfValue(streamJob.getId, JobConfKeyConstants.SAVEPOINT.getValue + "path")) match {
+ case Some(path) =>
+ val inspectVo = new JobSnapshotInspectVo
+ inspectVo.setPath(new URI(path).toString)
+ inspectVo
+ case _ => this.streamJobConfMapper.getRawConfValue(streamJob.getId, JobConfKeyConstants.START_AUTO_RESTORE_SWITCH.getValue) match {
+ case "ON" =>
+ Option(this.streamTaskService.getStateInfo(streamTaskMapper
+ .getLatestLaunchedById(streamJob.getId))) match {
+ case Some(jobState) =>
+ val inspectVo = new JobSnapshotInspectVo
+ inspectVo.setPath(jobState.getLocation.toString)
+ inspectVo
+ case _ => null
+ }
+ case _ => null
+ }
+ }
+
+ }
+
+ private def listInspect(job: StreamJob): JobListInspectVo = {
+    // If the detached (Flink manager EC) feature switch is on, obtain the detached client and send a list request
+ val listVo = new JobListInspectVo
+ if (JobLauncherConfiguration.ENABLE_FLINK_MANAGER_EC_ENABLE.getValue && JobLauncherConfiguration.ENABLE_FLINK_LIST_INSPECT.getHotValue) {
+ val appName = s"${job.getProjectName}.${job.getName}"
+ Utils.tryCatch {
+ val appType = if (job.getJobType.toLowerCase().contains("flink")) {
+ JobConstants.APP_TYPE_FLINK
+ } else if (job.getJobType.toLowerCase().contains("spark")) {
+ JobConstants.APP_TYPE_SPARK
+ } else {
+ logger.error(s"Unknown job type : ${job.getJobType}")
+ throw new JobCreateErrorException(JobLaunchErrorCode.JOB_LIST_YARN_APP_ERROR, s"Unknown job type : ${job.getJobType}")
+ }
+ val appTypeList = new util.ArrayList[String]()
+ if (StringUtils.isNotBlank(appType)) {
+ appTypeList.add(appType)
+ }
+ logger.info(s"job appType is : ${appType}")
+ val appList = LinkisFlinkManagerJobClient.listYarnApp(appName, job.getSubmitUser, "streamis", appTypeList)
+ if (null != appList && !appList.isEmpty) {
+ appList.asScala.foreach{
+ app =>
+ if (app.getApplicationName().equalsIgnoreCase(appName)) {
+ listVo.addYarnApp(app)
+ } else {
+                logger.info(s"yarn app name : ${app.getApplicationName()} matches but does not equal job name : ${appName}, ignoring it.")
+ }
+ }
+          logger.info(s"There are ${listVo.getList.size()} apps with the same name : ${appName}")
+ logger.info(JsonUtils.jackson.writeValueAsString(appList))
+ } else {
+          listVo.addOneUrl(null, "none", null)
+ }
+ } {
+ case e: Exception =>
+          val msg = s"Failed to query running yarn applications with the same name, please try again later. appName: ${appName}, user : ${job.getSubmitUser}. ${e.getMessage}"
+ logger.error(msg, e)
+ throw new JobFetchErrorException(JobLaunchErrorCode.JOB_LIST_YARN_APP_ERROR, msg)
+ }
+ } else if (JobLauncherConfiguration.ENABLE_FLINK_LIST_INSPECT.getValue) {
+ // default notice
+      listVo.addOneUrl(null, "The administrator has not enabled the engine management feature, so running yarn applications with the same name cannot be viewed", null)
+ } else {
+      listVo.addOneUrl(null, "The administrator has not enabled the feature of checking for running yarn applications with the same name", null)
+ }
+ listVo
+ }
+
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/DefaultStreamJobService.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/DefaultStreamJobService.scala
index c3f467118..874348c33 100755
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/DefaultStreamJobService.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/DefaultStreamJobService.scala
@@ -16,27 +16,29 @@
package com.webank.wedatasphere.streamis.jobmanager.manager.service
import java.util
-import java.util.Date
+import java.util.{Date, List}
import com.github.pagehelper.PageInfo
import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConstants
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.conf.JobConf
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.constants.JobConstants
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.exception.{JobCreateErrorException, JobFetchErrorException}
import com.webank.wedatasphere.streamis.jobmanager.launcher.service.StreamJobConfService
import com.webank.wedatasphere.streamis.jobmanager.manager.alert.AlertLevel
-import org.apache.linkis.common.exception.ErrorException
-import org.apache.linkis.common.utils.Logging
-import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf
import com.webank.wedatasphere.streamis.jobmanager.manager.dao.{StreamAlertMapper, StreamJobMapper, StreamTaskMapper}
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity._
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.{QueryJobListVo, TaskCoreNumVo, VersionDetailVo}
-import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{MetaJsonInfo, StreamAlertRecord, StreamJob, StreamJobVersion, StreamJobVersionFiles}
-import com.webank.wedatasphere.streamis.jobmanager.manager.exception.{JobCreateErrorException, JobFetchErrorException}
+import com.webank.wedatasphere.streamis.jobmanager.manager.service.DefaultStreamJobService.JobDeployValidateResult
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.JobContentParser
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.StreamisTransformJobContent
-import com.webank.wedatasphere.streamis.jobmanager.manager.util.{ReaderUtils, ZipHelper}
+import com.webank.wedatasphere.streamis.jobmanager.manager.util.{JobUtils, ReaderUtils, ZipHelper}
import org.apache.commons.lang.StringUtils
+import org.apache.commons.lang3.ObjectUtils
+import org.apache.linkis.common.exception.ErrorException
+import org.apache.linkis.common.utils.Logging
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
import org.springframework.transaction.annotation.Transactional
-import javax.annotation.Resource
import scala.collection.JavaConverters._
@@ -60,8 +62,17 @@ class DefaultStreamJobService extends StreamJobService with Logging {
this.streamJobMapper.getJobById(jobId)
}
- override def getByProList(projectName: String, userName: String, jobName: String, jobStatus: Integer, jobCreator: String): PageInfo[QueryJobListVo] = {
- val streamJobList = streamJobMapper.getJobLists(projectName, userName, jobName, jobStatus, jobCreator)
+ override def getJobByName(jobName: String): util.List[StreamJob] = streamJobMapper.getJobByName(jobName)
+
+ override def getByProList(projectName: String, userName: String, jobName: String, jobStatus: Integer, jobCreator: String, label: String): PageInfo[QueryJobListVo] = {
+ var streamJobList: util.List[QueryJobListVo] = null
+ if (StringUtils.isNotBlank(jobName) && jobName.contains(JobConstants.JOB_NAME_DELIMITER)) {
+ val jobNameList = new util.ArrayList[String]()
+ jobName.split(JobConstants.JOB_NAME_DELIMITER).filter(StringUtils.isNotBlank(_)).foreach(jobNameList.add)
+ streamJobList = streamJobMapper.getJobLists(projectName, userName, null, jobStatus, jobCreator, JobUtils.escapeChar(label), JobConfKeyConstants.MANAGE_MODE_KEY.getValue, jobNameList)
+ } else {
+ streamJobList = streamJobMapper.getJobLists(projectName, userName, JobUtils.escapeChar(jobName), jobStatus, jobCreator, JobUtils.escapeChar(label), JobConfKeyConstants.MANAGE_MODE_KEY.getValue, null)
+ }
if (streamJobList != null && !streamJobList.isEmpty) {
val pageInfo = new PageInfo[QueryJobListVo](streamJobList)
return pageInfo
@@ -69,11 +80,24 @@ class DefaultStreamJobService extends StreamJobService with Logging {
new PageInfo[QueryJobListVo](new util.ArrayList[QueryJobListVo]())
}
+ /**
+ * Page list query of version info
+ *
+ * @param jobId job id
+ * @return
+ */
+ override def getVersionList(jobId: Long): PageInfo[VersionDetailVo] = {
+ val jobVersions = streamJobMapper.getJobVersionDetails(jobId)
+ if (null == jobVersions){
+ new PageInfo[VersionDetailVo](new util.ArrayList[VersionDetailVo]())
+ } else new PageInfo[VersionDetailVo](jobVersions)
+ }
+
/**
   * Core indicator
*/
override def countByCores(projectName: String, userName: String): TaskCoreNumVo = {
- val jobs = streamJobMapper.getJobLists(projectName, userName, null, null, null)
+ val jobs = streamJobMapper.getJobLists(projectName, userName, null, null, null, null, JobConfKeyConstants.MANAGE_MODE_KEY.getValue, null)
val taskNum = new TaskCoreNumVo()
taskNum.setProjectName(projectName)
if (jobs != null && !jobs.isEmpty) {
@@ -100,7 +124,7 @@ class DefaultStreamJobService extends StreamJobService with Logging {
}
- override def updateVersion(preVersion: String): String = {
+ override def rollingJobVersion(preVersion: String): String = {
val newVersion = preVersion.substring(1).toInt + 1
val codeFormat = "%05d"
"v" + String.format(codeFormat, new Integer(newVersion))
@@ -123,38 +147,52 @@ class DefaultStreamJobService extends StreamJobService with Logging {
}
- override def createStreamJob(metaJsonInfo: MetaJsonInfo, userName: String): StreamJobVersion = {
+ override def deployStreamJob(streamJob: StreamJob,
+ metaJsonInfo: MetaJsonInfo, userName: String, updateVersion: Boolean): StreamJobVersion = {
if(StringUtils.isBlank(metaJsonInfo.getJobType))
throw new JobCreateErrorException(30030, s"jobType is needed.")
+ else if(!JobConf.SUPPORTED_JOB_TYPES.getValue.contains(metaJsonInfo.getJobType)) {
+ throw new JobCreateErrorException(30030, s"jobType ${metaJsonInfo.getJobType} is not supported.")
+ }
if(metaJsonInfo.getJobContent == null || metaJsonInfo.getJobContent.isEmpty)
throw new JobCreateErrorException(30030, s"jobContent is needed.")
- val job = streamJobMapper.getCurrentJob(metaJsonInfo.getProjectName, metaJsonInfo.getJobName)
- val streamJob = new StreamJob()
val jobVersion = new StreamJobVersion()
- if (job == null) {
- streamJob.setCreateBy(userName)
- streamJob.setSubmitUser(userName)
- streamJob.setJobType(metaJsonInfo.getJobType)
- streamJob.setDescription(metaJsonInfo.getDescription)
+ val newStreamJob = new StreamJob()
+ if (streamJob == null) {
+      logger.info("StreamJob is null, creating a new StreamJob")
jobVersion.setVersion("v00001")
- streamJob.setCreateTime(new Date())
- streamJob.setLabel(metaJsonInfo.getTags)
- streamJob.setName(metaJsonInfo.getJobName)
- streamJob.setProjectName(metaJsonInfo.getProjectName)
- streamJobMapper.insertJob(streamJob)
+ newStreamJob.setCreateBy(userName)
+ newStreamJob.setSubmitUser(userName)
+ newStreamJob.setJobType(metaJsonInfo.getJobType)
+ newStreamJob.setDescription(metaJsonInfo.getDescription)
+ newStreamJob.setCurrentVersion(jobVersion.getVersion)
+ newStreamJob.setCreateTime(new Date())
+ newStreamJob.setLabel(metaJsonInfo.getTags)
+ newStreamJob.setName(metaJsonInfo.getJobName)
+ newStreamJob.setProjectName(metaJsonInfo.getProjectName)
+ streamJobMapper.insertJob(newStreamJob)
} else {
- if(job.getJobType != metaJsonInfo.getJobType)
- throw new JobCreateErrorException(30030, s"StreamJob-${job.getName} has already created with jobType ${job.getJobType}, you cannot change it to ${metaJsonInfo.getJobType}.")
- streamJob.setId(job.getId)
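+      // Existing job: compute the next version number from the latest recorded version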
+ val jobVersions = streamJobMapper.getJobVersions(streamJob.getId)
+ if (jobVersions == null || jobVersions.isEmpty) jobVersion.setVersion("v00001")
+ else
+ jobVersion.setVersion(rollingJobVersion(jobVersions.get(0).getVersion))
+ if(streamJob.getJobType != metaJsonInfo.getJobType)
+ throw new JobCreateErrorException(30030, s"StreamJob-${streamJob.getName} has already created with jobType ${streamJob.getJobType}, you cannot change it to ${metaJsonInfo.getJobType}.")
+ if (updateVersion){
+ // update version
+ streamJob.setCurrentVersion(jobVersion.getVersion)
+ }
if (StringUtils.isNotEmpty(metaJsonInfo.getDescription))
streamJob.setDescription(metaJsonInfo.getDescription)
streamJobMapper.updateJob(streamJob)
- val jobVersions = streamJobMapper.getJobVersions(job.getId)
- if (jobVersions == null || jobVersions.isEmpty) jobVersion.setVersion("v00001")
- else
- jobVersion.setVersion(updateVersion(jobVersions.get(0).getVersion))
}
- jobVersion.setJobId(streamJob.getId)
+ if (ObjectUtils.isNotEmpty(streamJob)) {
+ jobVersion.setJobId(streamJob.getId)
+ } else {
+ logger.info("newStreamJob is {}", newStreamJob)
+ jobVersion.setJobId(newStreamJob.getId)
+ }
jobVersion.setJobContent(metaJsonInfo.getMetaInfo)
jobVersion.setCreateBy(userName)
jobVersion.setCreateTime(new Date)
@@ -173,12 +211,17 @@ class DefaultStreamJobService extends StreamJobService with Logging {
val inputPath = ZipHelper.unzip(inputZipPath)
val readerUtils = new ReaderUtils
val metaJsonInfo = readerUtils.parseJson(inputPath)
- if (StringUtils.isNotBlank(projectName) && projectName!=metaJsonInfo.getProjectName) {
- throw new JobCreateErrorException(30030, s"the projectName ${metaJsonInfo.getProjectName} is not matching the project ")
+ if (StringUtils.isNotBlank(projectName) && !projectName.equals(metaJsonInfo.getProjectName)) {
+      logger.warn(s"The projectName [${metaJsonInfo.getProjectName}] does not match the project, it will be changed to [${projectName}] automatically")
+ metaJsonInfo.setProjectName(projectName)
}
- validateUpload(metaJsonInfo.getProjectName, metaJsonInfo.getJobName, userName)
+ val validateResult = validateJobDeploy(metaJsonInfo.getProjectName, metaJsonInfo.getJobName, userName)
    // Generate the StreamJob, then generate a StreamJobVersion from the StreamJob
- val version = createStreamJob(metaJsonInfo, userName)
+ val version = deployStreamJob(validateResult.streamJob, metaJsonInfo, userName, validateResult.updateVersion)
+ // Save the job configuration, lock the job again if exists
+ if (null != metaJsonInfo.getJobConfig){
+ this.streamJobConfService.saveJobConfig(version.getJobId, metaJsonInfo.getJobConfig.asInstanceOf[util.Map[String, AnyRef]])
+ }
    // Upload all files other than meta.json
uploadFiles(metaJsonInfo, version, inputZipPath)
version
@@ -187,10 +230,15 @@ class DefaultStreamJobService extends StreamJobService with Logging {
@throws(classOf[ErrorException])
@Transactional(rollbackFor = Array(classOf[Exception]))
override def createOrUpdate(userName: String, metaJsonInfo: MetaJsonInfo): StreamJobVersion = {
- validateUpload(metaJsonInfo.getProjectName, metaJsonInfo.getJobName, userName)
+ val validateResult = validateJobDeploy(metaJsonInfo.getProjectName, metaJsonInfo.getJobName, userName)
val readerUtils = new ReaderUtils
metaJsonInfo.setMetaInfo(readerUtils.readAsJson(metaJsonInfo))
- createStreamJob(metaJsonInfo, userName)
+ val version = deployStreamJob(validateResult.streamJob, metaJsonInfo, userName, validateResult.updateVersion)
+ // Save the job configuration, lock the job again if exists
+ if (null != metaJsonInfo.getJobConfig){
+ this.streamJobConfService.saveJobConfig(version.getJobId, metaJsonInfo.getJobConfig.asInstanceOf[util.Map[String, AnyRef]])
+ }
+ version
}
override def getJobContent(jobId: Long, version: String): StreamisTransformJobContent = {
@@ -237,6 +285,12 @@ class DefaultStreamJobService extends StreamJobService with Logging {
AlertLevel.valueOf(level)
}
+ override def getLinkisFlinkAlertLevel(job: StreamJob): AlertLevel = {
+ val level = this.streamJobConfService.getJobConfValue(job.getId, JobConfKeyConstants.ALERT_LEVEL.getValue)
+ if (StringUtils.isBlank(level)) return AlertLevel.MAJOR
+ AlertLevel.valueOf(level)
+ }
+
override def isCreator(jobId: Long, username: String): Boolean = {
val job = streamJobMapper.getJobById(jobId)
if (job == null) return false
@@ -249,22 +303,35 @@ class DefaultStreamJobService extends StreamJobService with Logging {
streamAlertMapper.getAlertByJobIdAndVersion(username,jobId,job.getId)
}
- private def validateUpload(projectName: String, jobName: String, userName: String): Unit = {
+ private def validateJobDeploy(projectName: String, jobName: String, userName: String): JobDeployValidateResult = {
if(StringUtils.isBlank(jobName)) throw new JobCreateErrorException(30030, s"jobName is needed.")
if(StringUtils.isBlank(projectName)) throw new JobCreateErrorException(30030, s"projectName is needed.")
// Try to lock the stream job to create version
- Option(streamJobMapper.queryAndLockJobInCondition(projectName, jobName)).foreach(streamJob => {
- // Use the project privilege at restful api
-// if (streamJob.getCreateBy != userName)
-// throw new JobCreateErrorException(30030, s"You have no permission to update StreamJob-$jobName.")
- val jobVersions = streamJobMapper.getJobVersions(streamJob.getId)
- if (jobVersions != null && !jobVersions.isEmpty) {
- val tasks = streamTaskMapper.getTasksByJobIdAndJobVersionId(streamJob.getId, jobVersions.get(0).getId)
- if (tasks != null && !tasks.isEmpty && !JobConf.isCompleted(tasks.get(0).getStatus)) {
- throw new JobCreateErrorException(30030, s"StreamJob-$jobName is in status ${tasks.get(0).getStatus}, you cannot upload the zip.")
+ Option(streamJobMapper.queryAndLockJobInCondition(projectName, jobName)) match {
+ case Some(streamJob) =>
+ var updateVersion = true
+ val task = streamTaskMapper.getLatestByJobId(streamJob.getId)
+ if (task != null && !JobConf.isCompleted(task.getStatus)) {
+          logger.warn(s"StreamJob-$jobName is in status ${task.getStatus}, so this deployment will not update the job's current version")
+ updateVersion = false
}
- }
- })
+ JobDeployValidateResult(streamJob, updateVersion)
+ case _ =>
+ JobDeployValidateResult(null, updateVersion = true)
+ }
+
}
+ @throws(classOf[ErrorException])
+ @Transactional(rollbackFor = Array(classOf[Exception]))
+ override def updateLabel(streamJob: StreamJob): Unit = streamJobMapper.updateJob(streamJob)
+
+}
+
+object DefaultStreamJobService{
+ /**
+ * Deploy validate result
+   * @param streamJob     the locked stream job, or null if it does not exist yet
+   * @param updateVersion should update version
+ */
+ case class JobDeployValidateResult(streamJob: StreamJob, updateVersion: Boolean)
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/DefaultStreamTaskService.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/DefaultStreamTaskService.scala
index 1913fc0c9..18efc62d3 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/DefaultStreamTaskService.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/DefaultStreamTaskService.scala
@@ -15,29 +15,35 @@
package com.webank.wedatasphere.streamis.jobmanager.manager.service
+import java.util
+import java.util.concurrent.Future
+import java.util.{Calendar, function}
import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConstants
import com.webank.wedatasphere.streamis.jobmanager.launcher.dao.StreamJobConfMapper
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.conf.JobConf
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.exception.{JobErrorException, JobExecuteErrorException, JobFetchErrorException, JobPauseErrorException, JobTaskErrorException}
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobLaunchManager
-import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.{JobGenericState, JobState}
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.{JobInfo, LaunchJob}
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.entity.LogRequestPayload
-import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.{Checkpoint, Savepoint}
-import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.{FlinkJobClient, FlinkJobInfo}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.client.{AbstractJobClient, EngineConnJobClient}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.manager.SimpleFlinkJobLaunchManager
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.{FlinkCheckpoint, FlinkSavepoint}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.jobInfo.EngineConnJobInfo
import com.webank.wedatasphere.streamis.jobmanager.manager.SpringContextHolder
-import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf
-import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf.FLINK_JOB_STATUS_FAILED
import com.webank.wedatasphere.streamis.jobmanager.manager.dao.{StreamJobMapper, StreamTaskMapper}
-import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamTask
-import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.{ExecResultVo, JobProgressVo, JobStatusVo, PauseResultVo, ScheduleResultVo, StreamTaskListVo}
-import com.webank.wedatasphere.streamis.jobmanager.manager.exception.{JobErrorException, JobExecuteErrorException, JobFetchErrorException, JobPauseErrorException, JobTaskErrorException}
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo._
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{StreamJob, StreamJobMode, StreamTask}
import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.FutureScheduler
import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.events.AbstractStreamisSchedulerEvent.StreamisEventInfo
-import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.events.{AbstractStreamisSchedulerEvent, StreamisPhaseInSchedulerEvent}
+import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.events.StreamisPhaseInSchedulerEvent
import com.webank.wedatasphere.streamis.jobmanager.manager.scheduler.events.StreamisPhaseInSchedulerEvent.ScheduleCommand
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.exception.TransformFailedErrorException
-import com.webank.wedatasphere.streamis.jobmanager.manager.transform.{StreamisTransformJobBuilder, Transform}
+import com.webank.wedatasphere.streamis.jobmanager.manager.transform.{StreamisTransformJobBuilder, TaskMetricsParser, Transform}
import com.webank.wedatasphere.streamis.jobmanager.manager.util.DateUtils
import com.webank.wedatasphere.streamis.jobmanager.manager.utils.StreamTaskUtils
+
+import javax.annotation.Resource
import org.apache.commons.lang.StringUtils
import org.apache.linkis.common.utils.{Logging, Utils}
import org.apache.linkis.httpclient.dws.DWSHttpClient
@@ -47,10 +53,6 @@ import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Service
import org.springframework.transaction.annotation.Transactional
-import java.util
-import java.util.{Calendar, Date, function}
-import java.util.concurrent.Future
-import javax.annotation.Resource
import scala.collection.JavaConverters._
@@ -60,6 +62,7 @@ class DefaultStreamTaskService extends StreamTaskService with Logging{
@Autowired private var streamTaskMapper:StreamTaskMapper=_
@Autowired private var streamJobMapper:StreamJobMapper=_
@Autowired private var streamisTransformJobBuilders: Array[StreamisTransformJobBuilder] = _
+ @Autowired private var taskMetricsParser: Array[TaskMetricsParser] = _
@Resource
private var jobLaunchManager: JobLaunchManager[_ <: JobInfo] = _
@@ -72,6 +75,15 @@ class DefaultStreamTaskService extends StreamTaskService with Logging{
@Resource
private var scheduler: FutureScheduler = _
+ /**
+   * Get a task by its id
+   * @param Id task id
+ * @return
+ */
+ override def getTaskById(Id: Long): StreamTask = {
+ this.streamTaskMapper.getTaskById(Id)
+ }
+
/**
* Sync to execute job(task)
@@ -186,28 +198,18 @@ class DefaultStreamTaskService extends StreamTaskService with Logging{
override def schedule(context: StreamisPhaseInSchedulerEvent.StateContext, jobInfo: queue.JobInfo): util.Map[String, AnyRef] = {
val newTaskId = context.getVar("newTaskId")
if (null != newTaskId){
- var jobState: JobState = null
- // Means to fetch the job state from task to restore
- if (restore){
- val restoreTaskId = taskId
- // TODO fetch the job stage strategy
- jobState = if (restoreTaskId <= 0){
-// val earlierTasks = streamTaskMapper.getEarlierByJobId(finalJobId, 2)
-// if (earlierTasks.isEmpty){
-// throw new JobExecuteErrorException(-1, "Cannot find the candidate task to search state")
-// } else if (earlierTasks.size() < 2){
-// warn("First time to launch the StreamJob, ignore to restore JobState")
-// null
-// } else {
-// getStateInfo(earlierTasks.get(1))
-// }
- getStateInfo(streamTaskMapper.getLatestLaunchedById(jobId))
- } else getStateInfo(restoreTaskId)
+ val restoreTaskId = taskId
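+      // A non-positive task id means restoring from the latest launched task of this job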
+ val jobState: JobState = if (restoreTaskId <= 0){
+ getStateInfo(streamTaskMapper.getLatestLaunchedById(jobId))
+ } else getStateInfo(restoreTaskId)
+ if (null != jobState){
+        // jobState.setToRestore(true) means the job state will be used to restore the stream task
+ jobState.setToRestore(restore)
}
// Launch entrance
launch(newTaskId.asInstanceOf[Long], execUser, jobState)
} else {
- // TODO cannot find the new task id
+ // cannot find the new task id
}
null
}
@@ -330,18 +332,21 @@ class DefaultStreamTaskService extends StreamTaskService with Logging{
      throw new JobPauseErrorException(-1, s"Unable to pause the StreamTask [$pauseTaskId], the linkis job id is null")
}
val streamJob = streamJobMapper.getJobById(finalJobId)
- info(s"Try to stop StreamJob [${streamJob.getName} with task(taskId: ${streamTask.getId}, linkisJobId: ${streamTask.getLinkisJobId}).")
- val jobClient = jobLaunchManager.connect(streamTask.getLinkisJobId, streamTask.getLinkisJobInfo)
- val jobStateInfo = Utils.tryCatch(jobClient.stop(snapshot)){
- case e: Exception =>
- val pauseError = new JobPauseErrorException(-1, s"Fail to stop the StreamJob [${streamJob.getName}] " +
- s"with task(taskId: ${streamTask.getId}, linkisJobId: ${streamTask.getLinkisJobId}), reason: ${e.getMessage}.")
- pauseError.initCause(e)
- throw pauseError
- case pauseE: JobPauseErrorException =>
- throw pauseE
+    logger.info(s"Try to stop StreamJob [${streamJob.getName}] with task(taskId: ${streamTask.getId}, linkisJobId: ${streamTask.getLinkisJobId}).")
+ val jobClient = getJobLaunchManager(streamTask).connect(streamTask.getLinkisJobId, streamTask.getLinkisJobInfo)
+ val status = JobConf.linkisStatusToStreamisStatus(jobClient.getJobInfo(true).getStatus)
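+    // Only attempt to stop (optionally taking a snapshot) while the task has not yet completed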
+ if (!JobConf.isCompleted(status)) {
+ val jobStateInfo = Utils.tryCatch(jobClient.stop(snapshot)){
+ case e: Exception =>
+ val pauseError = new JobPauseErrorException(-1, s"Fail to stop the StreamJob [${streamJob.getName}] " +
+ s"with task(taskId: ${streamTask.getId}, linkisJobId: ${streamTask.getLinkisJobId}), reason: ${e.getMessage}.")
+ pauseError.initCause(e)
+ throw pauseError
+ case pauseE: JobPauseErrorException =>
+ throw pauseE
+ }
+ Option(jobStateInfo).foreach(stateInfo => resultSet.put("snapshotPath", stateInfo.getLocation))
}
- Option(jobStateInfo).foreach(stateInfo => resultSet.put("snapshotPath", stateInfo.getLocation))
streamTask.setLastUpdateTime(Calendar.getInstance.getTime)
streamTask.setStatus(JobConf.FLINK_JOB_STATUS_STOPPED.getValue)
streamTaskMapper.updateTask(streamTask)
@@ -395,14 +400,30 @@ class DefaultStreamTaskService extends StreamTaskService with Logging{
else streamTaskMapper.getLatestByJobId(jobId)
if (null != streamTask && StringUtils.isNotBlank(streamTask.getLinkisJobId)) {
Utils.tryCatch {
- val jobClient = jobLaunchManager.connect(streamTask.getLinkisJobId, streamTask.getLinkisJobInfo)
+ val jobClient = getJobLaunchManager(streamTask).connect(streamTask.getLinkisJobId, streamTask.getLinkisJobInfo)
jobClient match {
- case client: FlinkJobClient =>
+          // TODO: support other client types
+ case client: EngineConnJobClient =>
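+            // Fetch historical logs for completed tasks, live logs otherwise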
+ requestPayload.setLogHistory(JobConf.isCompleted(streamTask.getStatus))
val logIterator = client.fetchLogs(requestPayload)
returnMap.put("logPath", logIterator.getLogPath)
returnMap.put("logs", logIterator.getLogs)
returnMap.put("endLine", logIterator.getEndLine)
logIterator.close()
+ jobClient.getJobInfo match {
+ case linkisInfo: EngineConnJobInfo =>
+ if (StringUtils.isBlank(linkisInfo.getLogDirSuffix) && StringUtils.isNotBlank(logIterator.getLogDirSuffix)){
+ Utils.tryAndWarn {
+ // Update the linkis job info and store into database
+ linkisInfo.setLogDirSuffix(logIterator.getLogDirSuffix)
+                  streamTask.setLinkisJobInfo(DWSHttpClient.jacksonJson.writeValueAsString(linkisInfo))
+ streamTaskMapper.updateTask(streamTask)
+ }
+ }
+ case _ =>
+ }
+ case o =>
+ logger.error(s"Invalid client: ${o}")
}
}{ case e: Exception =>
// Just warn the exception
@@ -424,9 +445,9 @@ class DefaultStreamTaskService extends StreamTaskService with Logging{
val streamTask = if (taskId > 0) streamTaskMapper.getTaskById(taskId)
else streamTaskMapper.getLatestByJobId(jobId)
if (null != streamTask && StringUtils.isNotBlank(streamTask.getLinkisJobId)){
- val jobClient = this.jobLaunchManager.connect(streamTask.getLinkisJobId, streamTask.getLinkisJobInfo)
+ val jobClient = getJobLaunchManager(streamTask).connect(streamTask.getLinkisJobId, streamTask.getLinkisJobInfo)
return jobClient match {
- case flinkJobClient: FlinkJobClient =>
+ case flinkJobClient: AbstractJobClient =>
Option(flinkJobClient.triggerSavepoint()) match {
case Some(savepoint) =>
savepoint.getLocation.toString
@@ -470,12 +491,12 @@ class DefaultStreamTaskService extends StreamTaskService with Logging{
}).asJava
}
- def getTask(jobId:Long, version: String): FlinkJobInfo ={
+ def getTaskJobInfo(jobId:Long, version: String): EngineConnJobInfo ={
val str = streamTaskMapper.getTask(jobId, version)
if (StringUtils.isBlank(str)) {
- return new FlinkJobInfo
+ return new EngineConnJobInfo
}
- DWSHttpClient.jacksonJson.readValue(str,classOf[FlinkJobInfo])
+ DWSHttpClient.jacksonJson.readValue(str,classOf[EngineConnJobInfo])
}
@@ -511,6 +532,9 @@ class DefaultStreamTaskService extends StreamTaskService with Logging{
}
}
+
+ override def getLatestTaskByJobId(jobId: Long): StreamTask = streamTaskMapper.getLatestByJobId(jobId)
+
/**
* Create new task use the latest job version
*
@@ -520,7 +544,7 @@ class DefaultStreamTaskService extends StreamTaskService with Logging{
*/
@Transactional(rollbackFor = Array(classOf[Exception]))
override def createTask(jobId: Long, status: Int, creator: String): StreamTask = {
- trace(s"Query and lock the StreamJob in [$jobId] before creating StreamTask")
+ logger.trace(s"Query and lock the StreamJob in [$jobId] before creating StreamTask")
Option(streamJobMapper.queryAndLockJobById(jobId)) match {
case None => throw new JobTaskErrorException(-1, s"Unable to create StreamTask, the StreamJob [$jobId] is not exists.")
case Some(job) =>
@@ -528,23 +552,32 @@ class DefaultStreamTaskService extends StreamTaskService with Logging{
Option(streamJobMapper.getLatestJobVersion(jobId)) match {
case None => throw new JobTaskErrorException(-1, s"No versions can be found for job [id: ${job.getId}, name: ${job.getName}]")
case Some(jobVersion) =>
- info(s"Fetch the latest version: ${jobVersion.getVersion} for job [id: ${job.getId}, name: ${job.getName}]")
- // Get the latest task by job version id
- val latestTask = streamTaskMapper.getLatestByJobVersionId(jobVersion.getId, jobVersion.getVersion)
+ var noticeMessage = s"Fetch the latest version: ${jobVersion.getVersion} for job [id: ${job.getId}, name: ${job.getName}]"
+ if (!jobVersion.getVersion.equals(job.getCurrentVersion)){
+ noticeMessage += s", last version used for task is ${job.getCurrentVersion}"
+ // Update job current version
+ job.setCurrentVersion(jobVersion.getVersion)
+ streamJobMapper.updateJob(job)
+ }
+ logger.info(noticeMessage)
+ // Get the latest task by job id
+ val latestTask = streamTaskMapper.getLatestByJobId(jobId)
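+            // Create a new task only when there is no previous task or the latest one has completed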
if (null == latestTask || JobConf.isCompleted(latestTask.getStatus)){
- val streamTask = new StreamTask(jobId, jobVersion.getId, jobVersion.getVersion, creator)
- streamTask.setStatus(status)
- info(s"Produce a new StreamTask [jobId: $jobId, version: ${jobVersion.getVersion}, creator: $creator, status: ${streamTask.getStatus}]")
- streamTaskMapper.insertTask(streamTask)
- streamTask
+ val streamTask = new StreamTask(jobId, jobVersion.getId, jobVersion.getVersion, creator)
+ streamTask.setStatus(status)
+ logger.info(s"Produce a new StreamTask [jobId: $jobId, version: ${jobVersion.getVersion}, creator: $creator, status: ${streamTask.getStatus}]")
+ streamTaskMapper.insertTask(streamTask)
+ streamTask
} else {
- throw new JobTaskErrorException(-1, s"Unable to create new task, StreamTask [${latestTask.getId}] is still " +
- s"not completed for job [id: ${job.getId}, name: ${job.getName}]")
+ throw new JobTaskErrorException(-1, s"Unable to create new task, StreamTask [${latestTask.getId}] is still " +
+ s"not completed for job [id: ${job.getId}, name: ${job.getName}]")
}
}
}
}
+ override def updateTask(streamTask: StreamTask): Unit = streamTaskMapper.updateTask(streamTask)
+
/**
* Just launch task by task id
*
@@ -571,6 +604,7 @@ class DefaultStreamTaskService extends StreamTaskService with Logging{
if (null == streamJob){
throw new JobExecuteErrorException(-1, s"Not found the related job info in [${streamTask.getJobId}], has been dropped it ?")
}
+    // TODO: set this when the upstream StreamJob is created
info(s"Start to find the transform builder to process the StreamJob [${streamJob.getName}]")
val transformJob = streamisTransformJobBuilders.find(_.canBuild(streamJob)).map(_.build(streamJob))
.getOrElse(throw new TransformFailedErrorException(30408, s"Cannot find a TransformJobBuilder to build StreamJob ${streamJob.getName}."))
@@ -580,7 +614,7 @@ class DefaultStreamTaskService extends StreamTaskService with Logging{
launchJob = Transform.getTransforms.foldLeft(launchJob)((job, transform) => transform.transform(transformJob, job))
info(s"StreamJob [${streamJob.getName}] has transformed with launchJob $launchJob, now to launch it.")
//TODO getLinkisJobManager should use jobManagerType to instance in future, since not only `simpleFlink` mode is supported in future.
- val jobClient = jobLaunchManager.launch(launchJob, state)
+ val jobClient = getJobLaunchManager(streamTask).launch(launchJob, state)
// Refresh and store the information from JobClient
Utils.tryCatch {
// Refresh the job info(If the job shutdown immediately)
@@ -591,7 +625,7 @@ class DefaultStreamTaskService extends StreamTaskService with Logging{
// First to store the launched task info
streamTaskMapper.updateTask(streamTask)
info(s"StreamJob [${streamJob.getName}] is ${jobInfo.getStatus} with $jobInfo.")
- if (FLINK_JOB_STATUS_FAILED.getValue == streamTask.getStatus){
+ if (JobConf.FLINK_JOB_STATUS_FAILED.getValue == streamTask.getStatus){
        throw new JobExecuteErrorException(-1, s"(Failed to submit the streaming application, please check the logs), errorDesc: ${streamTask.getErrDesc}")
}
// Drop the temporary configuration
@@ -600,7 +634,7 @@ class DefaultStreamTaskService extends StreamTaskService with Logging{
warn(s"Fail to delete the temporary configuration for job [${streamTask.getJobId}], task [${streamTask.getId}]", e)
})
}{case e: Exception =>
- val message = s"Error occurred when to refresh and store the info of StreamJob [${streamJob.getName}] with JobClient"
+      val message = s"Error occurred while refreshing and storing the info of StreamJob [${streamJob.getName}] with JobClient. ${e.getMessage}"
warn(s"$message, stop and destroy the Client connection.")
// Stop the JobClient directly
Utils.tryAndWarn(jobClient.stop())
@@ -663,44 +697,102 @@ class DefaultStreamTaskService extends StreamTaskService with Logging{
getStateInfo(this.streamTaskMapper.getTaskById(taskId))
}
- private def getStateInfo(streamTask: StreamTask): JobState = {
+ override def getStateInfo(streamTask: StreamTask): JobState = {
Option(streamTask) match {
case Some(task) =>
if (StringUtils.isNotBlank(task.getLinkisJobId)) {
- info(s"Try to restore the JobState form taskId [${task.getId}], fetch the state information.")
- // Connect to get the JobInfo
- val jobClient = this.jobLaunchManager.connect(task.getLinkisJobId, task.getLinkisJobInfo)
- val jobInfo = jobClient.getJobInfo
- // Get the JobStateManager
- val jobStateManager = this.jobLaunchManager.getJobStateManager
val stateList: util.List[JobState] = new util.ArrayList[JobState]()
- // First to fetch the latest Savepoint information
- Option(jobStateManager.getJobState[Savepoint](classOf[Savepoint], jobInfo)).foreach(savepoint => stateList.add(savepoint))
- // Determinate if need the checkpoint information
- this.streamJobConfMapper.getRawConfValue(task.getJobId, JobConfKeyConstants.CHECKPOINT_SWITCH.getValue) match {
- case "ON" =>
- // Then to fetch the latest Checkpoint information
- Option(jobStateManager.getJobState[Checkpoint](classOf[Checkpoint], jobInfo)).foreach(checkpoint => stateList.add(checkpoint))
+ // Connect to get the JobInfo
+ getJobLaunchManager(task) match {
+ case jobLaunchManager: SimpleFlinkJobLaunchManager =>
+ // Only support to fetch state information for Flink stream task
+ logger.info(s"Try to fetch and choose the state information from [${task.getId}].")
+ val jobClient = jobLaunchManager.connect(task.getLinkisJobId, task.getLinkisJobInfo)
+ val jobInfo = jobClient.getJobInfo
+ // Get the JobStateManager
+ val jobStateManager = jobLaunchManager.getJobStateManager
+ // First to fetch the latest Savepoint information
+ Option(jobStateManager.getJobState[FlinkSavepoint](classOf[FlinkSavepoint], jobInfo)).foreach(savepoint => stateList.add(savepoint))
+ // Determinate if need the checkpoint information
+ this.streamJobConfMapper.getRawConfValue(task.getJobId, JobConfKeyConstants.CHECKPOINT_SWITCH.getValue) match {
+ case "ON" =>
+ // Then to fetch the latest Checkpoint information
+ Option(jobStateManager.getJobState[FlinkCheckpoint](classOf[FlinkCheckpoint], jobInfo)).foreach(checkpoint => stateList.add(checkpoint))
+ case _ =>
+ }
+ // At last fetch the state information from storage
+ Option(jobInfo.getJobStates).foreach(stateInfoList => {
+ stateInfoList.foreach(stateInfo => {
+ val jobState = new JobGenericState(stateInfo.getLocation)
+ jobState.setToRestore(stateInfo.isRestore)
+ jobState.setTimestamp(stateInfo.getTimestamp)
+ stateList.add(jobState)
+ })
+ })
case _ =>
}
- // Fetch the job state info in jobInfo at last
-// Option(jobInfo.getJobStates).foreach(states => states.foreach(state => {
-// val savepoint = new Savepoint(state.getLocation)
-// savepoint.setTimestamp(state.getTimestamp)
-// stateList.add(savepoint)
-// }))
if (!stateList.isEmpty){
// Choose the newest job state
val finalState = stateList.asScala.maxBy(_.getTimestamp)
- info(s"Final choose the JobState: [${finalState.getLocation}] to restore the StreamJob")
+          // The chosen candidate state is not marked for restore by default
+          finalState.setToRestore(false)
+          logger.info(s"Finally chose the JobState: [${finalState.getLocation}] as the candidate for restoring the StreamJob")
return finalState
}
} else {
-
+        // Cannot fetch state information without a Linkis job id
+        throw new JobFetchErrorException(30030, s"Task ${task.getId} has a null linkisJobId.")
}
null
case _ => null
}
}
+ override def getJobDetailsVO(streamJob: StreamJob, version: String): JobDetailsVo = {
+ val flinkJobInfo = getTaskJobInfo(streamJob.getId, version)
+ val jobStateInfos = flinkJobInfo.getJobStates
+ var manageMod = StreamJobMode.ENGINE_CONN
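+    // Default to ENGINE_CONN when no explicit manage mode can be resolved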
+ if (JobConf.isCompleted(streamJob.getStatus)) {
+      // For completed jobs, read the manage mode from the stored job configuration
+ val value = streamJobConfMapper.getRawConfValue(streamJob.getId, JobConfKeyConstants.MANAGE_MODE_KEY.getValue)
+ manageMod = Option(Utils.tryAndWarn(StreamJobMode.valueOf(value))).getOrElse(StreamJobMode.ENGINE_CONN)
+ } else {
+ manageMod = Option(streamJobMapper.getJobVersionById(streamJob.getId, version)) match {
+ case Some(jobVersion) =>
+ Option(Utils.tryQuietly(StreamJobMode
+ .valueOf(jobVersion.getManageMode))).getOrElse(StreamJobMode.ENGINE_CONN)
+ case _ => StreamJobMode.ENGINE_CONN
+ }
+ }
+
+ val metricsStr = if (JobConf.SUPPORTED_MANAGEMENT_JOB_TYPES.getValue.contains(streamJob.getJobType)) null
+ else if(jobStateInfos == null || jobStateInfos.length == 0) null
+ else jobStateInfos(0).getLocation
+    taskMetricsParser.find(_.canParse(streamJob)).map(_.parse(metricsStr)).map { jobDetailsVO =>
+      jobDetailsVO.setLinkisJobInfo(flinkJobInfo)
+      jobDetailsVO.setManageMode(manageMod.getClientType)
+      jobDetailsVO
+    }.getOrElse(throw new JobFetchErrorException(30030, s"Cannot find a TaskMetricsParser to parse job details."))
+ }
+
+ /**
+ * Fetch the suitable job launch manager
+ * @param streamTask stream task
+ * @return
+ */
+ private def getJobLaunchManager(streamTask: StreamTask): JobLaunchManager[_ <: JobInfo] = {
+ Option(streamTask.getJobType) match {
+ case Some(jobType) =>
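+        // A jobType such as "flink.sql" maps to the launch manager registered under its prefix ("flink")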
+ var launchType = jobType
+ if (launchType.indexOf(".") > 0){
+ launchType = launchType.substring(0, launchType.indexOf("."))
+ }
+ val manager = JobLaunchManager.getJobManager(launchType.toLowerCase)
+ if (null == manager){
+ throw new JobErrorException(-1, s"Cannot find the suitable job launch manager for jobType: [${jobType}]")
+ }
+ manager
+ case _ => this.jobLaunchManager
+ }
+ }
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamJobInspectService.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamJobInspectService.scala
new file mode 100644
index 000000000..8be02cf6b
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamJobInspectService.scala
@@ -0,0 +1,14 @@
+package com.webank.wedatasphere.streamis.jobmanager.manager.service
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.JobInspectVo
+
+import java.util
+
+trait StreamJobInspectService {
+ /**
+ * Inspect method
+ * @param jobId job id
+ * @param types type list for inspecting
+ * @return
+ */
+ def inspect(jobId: Long, types: Array[JobInspectVo.Types]): util.List[JobInspectVo]
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamJobService.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamJobService.scala
index 5f27f0864..2d7c32c76 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamJobService.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamJobService.scala
@@ -15,18 +15,31 @@ trait StreamJobService {
def getJobById(jobId: Long): StreamJob
+
+ def getJobByName(jobName: String): util.List[StreamJob]
+
/**
* Page list query
+ *
* @param projectName project name
- * @param jobName job name
- * @param jobStatus job status
- * @param jobCreator job creator
+ * @param jobName job name
+ * @param jobStatus job status
+ * @param jobCreator job creator
* @return
*/
- def getByProList(projectName: String, userName: String, jobName: String, jobStatus: Integer, jobCreator: String): PageInfo[QueryJobListVo]
+ def getByProList(projectName: String, userName: String, jobName: String, jobStatus: Integer, jobCreator: String, label: String): PageInfo[QueryJobListVo]
+
+ /**
+ * Page list query of version info
+ *
+ * @param jobId job id
+ * @return
+ */
+ def getVersionList(jobId: Long): PageInfo[VersionDetailVo]
/**
* Count core norm
+ *
* @param projectName project name
* @return
*/
@@ -34,37 +47,43 @@ trait StreamJobService {
/**
* Version detail information
- * @param jobId job id
+ *
+ * @param jobId job id
* @param version version
*/
def versionDetail(jobId: Long, version: String): VersionDetailVo
/**
- * Update version
+ * Rolling job version
+ *
* @param preVersion version
*/
- def updateVersion(preVersion: String): String
+ def rollingJobVersion(preVersion: String): String
/**
* Upload files
+ *
* @param metaJsonInfo meta json
- * @param version version
- * @param path path
+ * @param version version
+ * @param path path
*/
def uploadFiles(metaJsonInfo: MetaJsonInfo, version: StreamJobVersion, path: String): Unit
/**
- * Create stream job
- * @param metaJsonInfo meta json
- * @param userName username
+ * Deploy stream job
+ *
+ * @param metaJsonInfo meta json
+ * @param userName username
+ * @param updateVersion should update version
* @return
*/
- def createStreamJob(metaJsonInfo: MetaJsonInfo, userName: String): StreamJobVersion
+ def deployStreamJob(streamJob: StreamJob, metaJsonInfo: MetaJsonInfo, userName: String, updateVersion: Boolean): StreamJobVersion
/**
* Upload job
- * @param projectName project name
- * @param userName username
+ *
+ * @param projectName project name
+ * @param userName username
* @param inputZipPath input zip path
* @return
*/
@@ -72,7 +91,8 @@ trait StreamJobService {
/**
* Create or update job with meta json
- * @param userName username
+ *
+ * @param userName username
* @param metaJsonInfo meta json
* @return
*/
@@ -80,7 +100,8 @@ trait StreamJobService {
/**
* Get job content
- * @param jobId job id
+ *
+ * @param jobId job id
* @param version version
* @return
*/
@@ -88,7 +109,8 @@ trait StreamJobService {
/**
* Has permission
- * @param jobId job id
+ *
+ * @param jobId job id
* @param username username
* @return
*/
@@ -98,6 +120,7 @@ trait StreamJobService {
/**
* Alert user
+ *
* @param job stream job
* @return
*/
@@ -105,6 +128,7 @@ trait StreamJobService {
/**
* Alert level
+ *
* @param job stream job
* @return
*/
@@ -112,7 +136,8 @@ trait StreamJobService {
/**
* Is creator
- * @param jobId job id
+ *
+ * @param jobId job id
* @param username username
* @return
*/
@@ -120,10 +145,15 @@ trait StreamJobService {
/**
* List alert message list
+ *
* @param username username
- * @param jobId job id
- * @param version version
+ * @param jobId job id
+ * @param version version
* @return
*/
def getAlert(username: String, jobId: Long, version: String): util.List[StreamAlertRecord]
+
+ def updateLabel(streamJob: StreamJob): Unit
+
+ def getLinkisFlinkAlertLevel(job: StreamJob): AlertLevel
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamTaskService.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamTaskService.scala
index 079a8d8f8..296ebb3ef 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamTaskService.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/StreamTaskService.scala
@@ -17,10 +17,9 @@ package com.webank.wedatasphere.streamis.jobmanager.manager.service
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState
import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.entity.LogRequestPayload
-import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.FlinkJobInfo
-import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamTask
-import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.{ExecResultVo, JobProgressVo, JobStatusVo, PauseResultVo, StreamTaskListVo}
-
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.jobInfo.EngineConnJobInfo
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{StreamJob, StreamTask}
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.{ExecResultVo, JobDetailsVo, JobProgressVo, JobStatusVo, PauseResultVo, StreamTaskListVo}
import java.util
import java.util.concurrent.Future
/**
@@ -28,6 +27,10 @@ import java.util.concurrent.Future
*/
trait StreamTaskService {
+
+ def getTaskById(Id: Long): StreamTask
+
+
/**
* Sync to execute job(task)
* 1) create a new task
@@ -87,6 +90,8 @@ trait StreamTaskService {
*/
def launch(taskId: Long, execUser: String): Unit
+ def getLatestTaskByJobId(jobId: Long): StreamTask
+
/**
* Create new task use the latest job version
* @param jobId job id
@@ -95,6 +100,8 @@ trait StreamTaskService {
*/
def createTask(jobId: Long, status: Int, creator: String): StreamTask
+ def updateTask(streamTask: StreamTask): Unit
+
/**
* Update the task status
* @param jobId job id
@@ -146,9 +153,13 @@ trait StreamTaskService {
* @param version version
* @return
*/
- def getTask(jobId: Long, version: String): FlinkJobInfo
+ def getTaskJobInfo(jobId: Long, version: String): EngineConnJobInfo
def getStateInfo(taskId: Long): JobState
+ def getStateInfo(streamTask: StreamTask): JobState
+
+ def getJobDetailsVO(streamJob: StreamJob, version: String): JobDetailsVo
+
}
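A hedged sketch combining the new lookup methods on `StreamTaskService` (the wrapper class is illustrative; the method names come from the trait above):

```scala
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.state.JobState
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamTask
import com.webank.wedatasphere.streamis.jobmanager.manager.service.StreamTaskService

class TaskStateExample(taskService: StreamTaskService) {
  // Resolve the persisted state of a job's most recent task, using the
  // new getStateInfo(StreamTask) overload to avoid a second lookup by id.
  def latestState(jobId: Long): JobState = {
    val task: StreamTask = taskService.getLatestTaskByJobId(jobId)
    taskService.getStateInfo(task)
  }
}
```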
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/TaskMonitorService.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/TaskMonitorService.scala
index edf36bb94..2788eae1a 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/TaskMonitorService.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/service/TaskMonitorService.scala
@@ -18,21 +18,21 @@ package com.webank.wedatasphere.streamis.jobmanager.manager.service
import java.util
import java.util.Date
import java.util.concurrent.{Future, TimeUnit}
-import com.google.common.collect.Sets
import com.webank.wedatasphere.streamis.jobmanager.launcher.JobLauncherAutoConfiguration
import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConstants
import com.webank.wedatasphere.streamis.jobmanager.launcher.dao.StreamJobConfMapper
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.conf.JobConf
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobLaunchManager
-import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.{FlinkJobInfo, LinkisJobInfo}
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.jobInfo.{EngineConnJobInfo, LinkisJobInfo}
import com.webank.wedatasphere.streamis.jobmanager.manager.alert.{AlertLevel, Alerter}
-import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf
import com.webank.wedatasphere.streamis.jobmanager.manager.dao.{StreamJobMapper, StreamTaskMapper}
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{StreamJob, StreamTask}
import com.webank.wedatasphere.streamis.jobmanager.manager.utils.StreamTaskUtils
import javax.annotation.{PostConstruct, PreDestroy, Resource}
import org.apache.commons.lang.exception.ExceptionUtils
+import org.apache.commons.lang3.StringUtils
import org.apache.linkis.common.exception.ErrorException
import org.apache.linkis.common.utils.{Logging, RetryHandler, Utils}
import org.springframework.beans.factory.annotation.Autowired
@@ -84,58 +84,63 @@ class TaskMonitorService extends Logging {
return
}
streamTasks.filter(shouldMonitor).foreach { streamTask =>
- streamTask.setLastUpdateTime(new Date)
- streamTaskMapper.updateTask(streamTask)
val job = streamJobMapper.getJobById(streamTask.getJobId)
- info(s"Try to update status of StreamJob-${job.getName}.")
- val retryHandler = new RetryHandler {}
- retryHandler.setRetryNum(3)
- retryHandler.setRetryMaxPeriod(2000)
- retryHandler.addRetryException(classOf[ErrorException])
- var jobInfo:JobInfo = null
- Utils.tryCatch {
- jobInfo = retryHandler.retry(refresh(streamTask, jobLaunchManager), s"Task-Monitor-${job.getName}")
- } { ex => {
- error(s"Fetch StreamJob-${job.getName} failed, maybe the Linkis cluster is wrong, please be noticed!", ex)
- val errorMsg = ExceptionUtils.getRootCauseMessage(ex)
- if (errorMsg != null && errorMsg.contains("Not exists EngineConn")) {
- streamTask.setStatus(JobConf.FLINK_JOB_STATUS_FAILED.getValue)
- streamTask.setErrDesc("Not exists EngineConn.")
- } else {
- // 连续三次还是出现异常,说明Linkis的Manager已经不能正常提供服务,告警并不再尝试获取状态,等待下次尝试
- val users = getAlertUsers(job)
- users.add(job.getCreateBy)
- alert(jobService.getAlertLevel(job), s"请求LinkisManager失败,Linkis集群出现异常,请关注!影响任务[${job.getName}]", users, streamTask)
+ if(!JobConf.SUPPORTED_MANAGEMENT_JOB_TYPES.getValue.contains(job.getJobType)) {
+ val userList = getAlertUsers(job)
+ // Alert the job's configured users
+ val alertMsg = s"Spark Streaming应用[${job.getName}]已经超过 ${Utils.msDurationToString(System.currentTimeMillis - streamTask.getLastUpdateTime.getTime)} 没有更新状态, 请及时确认应用是否正常!"
+ alert(jobService.getAlertLevel(job), alertMsg, userList, streamTask)
+ } else {
+ streamTask.setLastUpdateTime(new Date)
+ streamTaskMapper.updateTask(streamTask)
+ info(s"Try to update status of StreamJob-${job.getName}.")
+ val retryHandler = new RetryHandler {}
+ retryHandler.setRetryNum(3)
+ retryHandler.setRetryMaxPeriod(2000)
+ retryHandler.addRetryException(classOf[ErrorException])
+ var jobInfo:JobInfo = null
+ Utils.tryCatch {
+ jobInfo = retryHandler.retry(refresh(streamTask, jobLaunchManager), s"Task-Monitor-${job.getName}")
+ } { ex =>
+ error(s"Fetch StreamJob-${job.getName} failed, maybe the Linkis cluster is wrong, please be noticed!", ex)
+ val errorMsg = ExceptionUtils.getRootCauseMessage(ex)
+ if (errorMsg != null && errorMsg.contains("Not exists EngineConn")) {
+ streamTask.setStatus(JobConf.FLINK_JOB_STATUS_FAILED.getValue)
+ streamTask.setErrDesc("Not exists EngineConn.")
+ } else {
+ // Still failing after three consecutive retries means the Linkis Manager can no longer serve requests; alert and stop fetching status until the next round
+ val users = getAdminAlertUsers()
+ alert(jobService.getLinkisFlinkAlertLevel(job), s"请求LinkisManager失败,Linkis集群出现异常,请关注!影响任务[${job.getName}]", users, streamTask)
+ }
}
- }
- }
- streamTaskMapper.updateTask(streamTask)
- if(streamTask.getStatus == JobConf.FLINK_JOB_STATUS_FAILED.getValue) {
- warn(s"StreamJob-${job.getName} is failed, please be noticed.")
- var extraMessage = ""
- Option(jobInfo) match {
- case Some(flinkJobInfo: FlinkJobInfo) =>
- extraMessage = s",${flinkJobInfo.getApplicationId}"
- case _ =>
+ streamTaskMapper.updateTask(streamTask)
+ if(streamTask.getStatus == JobConf.FLINK_JOB_STATUS_FAILED.getValue) {
+ warn(s"StreamJob-${job.getName} is failed, please be noticed.")
+ var extraMessage = ""
+ Option(jobInfo) match {
+ case Some(flinkJobInfo: EngineConnJobInfo) =>
+ extraMessage = s",${flinkJobInfo.getApplicationId}"
+ case _ =>
+ }
+ // Automatically restart the application if the user has turned on the fail-restart switch.
+ var alertMsg = s"Streamis 流式应用[${job.getName}${extraMessage}]已经失败, 请登陆Streamis查看应用日志."
+ this.streamJobConfMapper.getRawConfValue(job.getId, JobConfKeyConstants.FAIL_RESTART_SWITCH.getValue) match {
+ case "ON" =>
+ alertMsg = s"${alertMsg} 现将自动拉起该应用"
+ Utils.tryCatch{
+ info(s"Start to reLaunch the StreamisJob [${job.getName}], now to submit and schedule it...")
+ // Use submit user to start job
+ val startAutoRestoreSwitch = "ON".equals(this.streamJobConfMapper.getRawConfValue(job.getId, JobConfKeyConstants.START_AUTO_RESTORE_SWITCH.getValue))
+ val future: Future[String] = streamTaskService.asyncExecute(job.getId, 0L, job.getSubmitUser, startAutoRestoreSwitch)
+ }{
+ case e:Exception =>
+ warn(s"Fail to reLaunch the StreamisJob [${job.getName}]", e)
+ }
+ case _ =>
+ }
+ val userList = getAlertUsers(job)
+ alert(jobService.getAlertLevel(job), alertMsg, userList, streamTask)
}
- // Need to add restart feature if user sets the restart parameters.
- var alertMsg = s"Streamis 流式应用[${job.getName}${extraMessage}]已经失败, 请登陆Streamis查看应用日志."
- this.streamJobConfMapper.getRawConfValue(job.getId, JobConfKeyConstants.FAIL_RESTART_SWITCH.getValue) match {
- case "ON" =>
- alertMsg = s"${alertMsg} 现将自动拉起该应用"
- Utils.tryCatch{
- info(s"Start to reLaunch the StreamisJob [${job.getName}], now to submit and schedule it...")
- // Use submit user to start job
- val future: Future[String] = streamTaskService.asyncExecute(job.getId, 0L, job.getSubmitUser, true)
- }{
- case e:Exception =>
- warn(s"Fail to reLaunch the StreamisJob [${job.getName}]", e)
- }
- case _ =>
- }
- val userList = Sets.newHashSet(job.getSubmitUser, job.getCreateBy)
- userList.addAll(getAlertUsers(job))
- alert(jobService.getAlertLevel(job), alertMsg, new util.ArrayList[String](userList), streamTask)
}
}
info("All StreamTasks status have updated.")
@@ -153,12 +158,30 @@ class TaskMonitorService extends Logging {
}
protected def getAlertUsers(job: StreamJob): util.List[String] = {
- var users = jobService.getAlertUsers(job)
- if (users == null) {
- users = new util.ArrayList[String]()
+ val allUsers = new util.LinkedHashSet[String]()
+ val alertUsers = jobService.getAlertUsers(job)
+ var isValid = false
+ if (alertUsers != null) {
+ alertUsers.foreach(user => {
+ if (StringUtils.isNotBlank(user) && !user.toLowerCase().contains("hduser")) {
+ isValid = true
+ allUsers.add(user)
+ }
+ })
}
- users.addAll(util.Arrays.asList(JobConf.STREAMIS_DEVELOPER.getValue.split(","):_*))
- users
+ if (!isValid){
+ allUsers.add(job.getSubmitUser)
+ allUsers.add(job.getCreateBy)
+ }
+ new util.ArrayList[String](allUsers)
+ }
+
+ protected def getAdminAlertUsers(): util.List[String] = {
+ val allUsers = new util.LinkedHashSet[String]()
+ util.Arrays.asList(JobConf.STREAMIS_DEVELOPER.getHotValue().split(","):_*).foreach(user => {
+ allUsers.add(user)
+ })
+ new util.ArrayList[String](allUsers)
}
protected def alert(alertLevel: AlertLevel, alertMsg: String, users: util.List[String], streamTask:StreamTask): Unit = alerters.foreach{ alerter =>
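`TaskMonitorService` wraps each status refresh in a bounded retry before giving up and alerting. A standalone sketch of the same `RetryHandler` pattern, with the refresh body left as a by-name placeholder:

```scala
import org.apache.linkis.common.exception.ErrorException
import org.apache.linkis.common.utils.RetryHandler

object RetryExample {
  // Retry a status fetch up to three times, waiting at most 2s between
  // attempts and only retrying on ErrorException, as the monitor does.
  def fetchWithRetry[T](jobName: String)(refresh: => T): T = {
    val retryHandler = new RetryHandler {}
    retryHandler.setRetryNum(3)
    retryHandler.setRetryMaxPeriod(2000)
    retryHandler.addRetryException(classOf[ErrorException])
    retryHandler.retry(refresh, s"Task-Monitor-$jobName")
  }
}
```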
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/ConfigTransform.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/ConfigTransform.scala
index 918bf2754..fd2620a3b 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/ConfigTransform.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/ConfigTransform.scala
@@ -27,11 +27,11 @@ import java.util
trait ConfigTransform extends Transform {
override def transform(streamisTransformJob: StreamisTransformJob, job: LaunchJob): LaunchJob = {
- val config: util.Map[String, Any] = streamisTransformJob.getConfigMap
+ val config: util.Map[String, AnyRef] = streamisTransformJob.getConfigMap
val group = configGroup()
if (StringUtils.isNotBlank(group)){
Option(config.get(group)) match {
- case Some(valueSet: util.Map[String, Any]) =>
+ case Some(valueSet: util.Map[String, AnyRef]) =>
transform(valueSet, job)
case _ => job
}
@@ -44,6 +44,6 @@ trait ConfigTransform extends Transform {
*/
protected def configGroup(): String = null
- protected def transform(valueSet: util.Map[String, Any], job: LaunchJob): LaunchJob
+ protected def transform(valueSet: util.Map[String, AnyRef], job: LaunchJob): LaunchJob
}
\ No newline at end of file
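`ConfigTransform` is a small template: a subclass names one config group, and only that group's value map is handed to its `transform`. A hedged sketch of the contract (the class and group name are hypothetical):

```scala
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.LaunchJob
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.ConfigTransform

import java.util

class ExampleGroupTransform extends ConfigTransform {

  // Only entries stored under this group key reach transform(valueSet, job).
  override protected def configGroup(): String = "wds.streamis.example.group"

  // Pass the job through untouched; a real transform folds valueSet into
  // the job's startup params, labels, or job content.
  override protected def transform(valueSet: util.Map[String, AnyRef], job: LaunchJob): LaunchJob = job
}
```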
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/StreamisJobContentTransform.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/StreamisJobContentTransform.scala
index 0f96016d2..2c3f58856 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/StreamisJobContentTransform.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/StreamisJobContentTransform.scala
@@ -20,18 +20,15 @@ import com.webank.wedatasphere.streamis.jobmanager.launcher.job.LaunchJob
import java.util
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.{StreamisTransformJob, StreamisTransformJobContent}
-
-
trait StreamisJobContentTransform extends Transform {
override def transform(streamisTransformJob: StreamisTransformJob, job: LaunchJob): LaunchJob = {
val jobContent = transformJobContent(streamisTransformJob.getStreamisTransformJobContent)
if(jobContent != null) {
- jobContent.put("runType", streamisTransformJob.getStreamisJobEngineConn.getRunType.toString)
+ jobContent.put("runType", streamisTransformJob.getStreamisJobConnect.getRunType.toString)
LaunchJob.builder().setLaunchJob(job).setJobContent(jobContent).build()
} else job
}
-
- protected def transformJobContent(transformJob: StreamisTransformJobContent): util.HashMap[String, Any]
+ protected def transformJobContent(transformJob: StreamisTransformJobContent): util.HashMap[String, AnyRef]
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/TaskMetricsParser.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/TaskMetricsParser.scala
new file mode 100644
index 000000000..1e7ad7b68
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/TaskMetricsParser.scala
@@ -0,0 +1,18 @@
+package com.webank.wedatasphere.streamis.jobmanager.manager.transform
+
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.JobDetailsVo
+
+/**
+ *
+ * @date 2022-10-21
+ * @author enjoyyin
+ * @since 0.5.0
+ */
+trait TaskMetricsParser {
+
+ def canParse(streamJob: StreamJob): Boolean
+
+ def parse(metrics: String): JobDetailsVo
+
+}
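`TaskMetricsParser` pairs a predicate with a parse step, so the caller picks a parser per job. A hedged selection sketch (only the wiring is illustrative):

```scala
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.JobDetailsVo
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.TaskMetricsParser

object MetricsParsing {
  // Let the first parser that claims the job turn its raw metrics JSON
  // into a JobDetailsVo; None if no parser matches.
  def parse(parsers: Seq[TaskMetricsParser], job: StreamJob, metrics: String): Option[JobDetailsVo] =
    parsers.find(_.canParse(job)).map(_.parse(metrics))
}
```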
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/builder/AbstractStreamisTransformJobBuilder.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/builder/AbstractStreamisTransformJobBuilder.scala
index 039fbd3e2..5ea152614 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/builder/AbstractStreamisTransformJobBuilder.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/builder/AbstractStreamisTransformJobBuilder.scala
@@ -15,15 +15,19 @@
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.builder
+import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConstants
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.conf.JobConf
import org.apache.linkis.common.conf.CommonVars
import org.apache.linkis.manager.label.entity.engine.RunType.RunType
import com.webank.wedatasphere.streamis.jobmanager.launcher.service.StreamJobConfService
import com.webank.wedatasphere.streamis.jobmanager.manager.dao.StreamJobMapper
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.StreamisTransformJobBuilder
-import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.{StreamisJobEngineConnImpl, StreamisTransformJob, StreamisTransformJobContent, StreamisTransformJobImpl}
+import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.{StreamisJobConnect, StreamisJobConnectImpl, StreamisJobEngineConnImpl, StreamisTransformJob, StreamisTransformJobContent, StreamisTransformJobImpl}
import org.springframework.beans.factory.annotation.Autowired
+import java.util
+import scala.collection.JavaConverters.mapAsJavaMapConverter
/**
* Created by enjoyyin on 2021/9/22.
*/
@@ -39,8 +43,13 @@ abstract class AbstractStreamisTransformJobBuilder extends StreamisTransformJobB
override def build(streamJob: StreamJob): StreamisTransformJob = {
val transformJob = createStreamisTransformJob()
transformJob.setStreamJob(streamJob)
- transformJob.setConfigMap(streamJobConfService.getJobConfig(streamJob.getId))
-// transformJob.setConfig(configurationService.getFullTree(streamJob.getId))
+ val jobConfig: util.Map[String, AnyRef] = Option(streamJobConfService.getJobConfig(streamJob.getId))
+ .getOrElse(new util.HashMap[String, AnyRef]())
+ // Put and overwrite the internal group; users cannot customize the internal configuration
+ val internalGroup = new util.HashMap[String, AnyRef]()
+ jobConfig.put(JobConfKeyConstants.GROUP_INTERNAL.getValue, internalGroup)
+ internalLogConfig(internalGroup)
+ transformJob.setConfigMap(jobConfig)
val streamJobVersions = streamJobMapper.getJobVersions(streamJob.getId)
// 无需判断streamJobVersions是否非空,因为TaskService已经判断了
transformJob.setStreamJobVersion(streamJobVersions.get(0))
@@ -48,9 +57,17 @@ abstract class AbstractStreamisTransformJobBuilder extends StreamisTransformJobB
transformJob
}
+ /**
+ * Fill the internal config group with the log collection settings
+ * @param internal internal config group
+ */
+ private def internalLogConfig(internal: util.Map[String, AnyRef]): Unit = {
+ internal.put(JobConf.STREAMIS_JOB_LOG_GATEWAY.key, JobConf.STREAMIS_JOB_LOG_GATEWAY.getValue)
+ internal.put(JobConf.STREAMIS_JOB_LOG_COLLECT_PATH.key, JobConf.STREAMIS_JOB_LOG_COLLECT_PATH.getValue)
+ }
}
-abstract class AbstractFlinkStreamisTransformJobBuilder extends AbstractStreamisTransformJobBuilder{
+abstract class AbstractDefaultStreamisTransformJobBuilder extends AbstractStreamisTransformJobBuilder{
private val flinkVersion = CommonVars("wds.streamis.flink.submit.version", "1.12.2").getValue
@@ -60,8 +77,11 @@ abstract class AbstractFlinkStreamisTransformJobBuilder extends AbstractStreamis
case transformJob: StreamisTransformJobImpl =>
val engineConn = new StreamisJobEngineConnImpl
engineConn.setEngineConnType("flink-" + flinkVersion)
- engineConn.setRunType(getRunType(transformJob))
transformJob.setStreamisJobEngineConn(engineConn)
+ val streamisJobConnect = new StreamisJobConnectImpl
+ streamisJobConnect.setRunType(getRunType(transformJob))
+ streamisJobConnect.setRunEngineVersion(flinkVersion)
+ transformJob.setStreamisJobConnect(streamisJobConnect)
transformJob
case job => job
}
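After `build`, every transform job carries an internal config group that user settings cannot override. A sketch that rebuilds the same map shape the builder produces (the keys are exactly the ones `internalLogConfig` writes):

```scala
import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConstants
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.conf.JobConf

import java.util

object InternalGroupShape {
  // jobConfig -> { GROUP_INTERNAL -> { log gateway, log collect path } }
  def example(): util.Map[String, AnyRef] = {
    val jobConfig = new util.HashMap[String, AnyRef]()
    val internalGroup = new util.HashMap[String, AnyRef]()
    internalGroup.put(JobConf.STREAMIS_JOB_LOG_GATEWAY.key, JobConf.STREAMIS_JOB_LOG_GATEWAY.getValue)
    internalGroup.put(JobConf.STREAMIS_JOB_LOG_COLLECT_PATH.key, JobConf.STREAMIS_JOB_LOG_COLLECT_PATH.getValue)
    jobConfig.put(JobConfKeyConstants.GROUP_INTERNAL.getValue, internalGroup)
    jobConfig
  }
}
```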
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/builder/StreamisFlinkTransformJobBuilder.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/builder/DefaultStreamisTransformJobBuilder.scala
similarity index 91%
rename from streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/builder/StreamisFlinkTransformJobBuilder.scala
rename to streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/builder/DefaultStreamisTransformJobBuilder.scala
index 838e9c417..49370ff8f 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/builder/StreamisFlinkTransformJobBuilder.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/builder/DefaultStreamisTransformJobBuilder.scala
@@ -15,9 +15,9 @@
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.builder
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.exception.JobExecuteErrorException
import org.apache.linkis.manager.label.entity.engine.RunType.RunType
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob
-import com.webank.wedatasphere.streamis.jobmanager.manager.exception.JobExecuteErrorException
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.JobContentParser
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.{StreamisTransformJob, StreamisTransformJobContent}
import org.springframework.beans.factory.annotation.Autowired
@@ -25,7 +25,7 @@ import org.springframework.stereotype.Component
@Component
-class StreamisFlinkTransformJobBuilder extends AbstractFlinkStreamisTransformJobBuilder {
+class DefaultStreamisTransformJobBuilder extends AbstractDefaultStreamisTransformJobBuilder {
@Autowired private var jobContentParsers: Array[JobContentParser] = _
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/builder/SparkStreamisTransformJobBuilder.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/builder/SparkStreamisTransformJobBuilder.scala
new file mode 100644
index 000000000..969e29875
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/builder/SparkStreamisTransformJobBuilder.scala
@@ -0,0 +1,26 @@
+package com.webank.wedatasphere.streamis.jobmanager.manager.transform.builder
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.exception.JobExecuteErrorException
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob
+import com.webank.wedatasphere.streamis.jobmanager.manager.transform.JobContentParser
+import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.{StreamisTransformJob, StreamisTransformJobContent}
+import org.apache.linkis.manager.label.entity.engine.RunType.RunType
+import org.springframework.beans.factory.annotation.Autowired
+import org.springframework.stereotype.Component
+
+@Component
+class SparkStreamisTransformJobBuilder extends AbstractDefaultStreamisTransformJobBuilder {
+
+ @Autowired private var jobContentParsers: Array[JobContentParser] = _
+
+ override def canBuild(streamJob: StreamJob): Boolean = jobContentParsers.map(_.jobType).contains(streamJob.getJobType.toLowerCase)
+
+ override protected def getRunType(transformJob: StreamisTransformJob): RunType =
+ jobContentParsers.find(_.jobType == transformJob.getStreamJob.getJobType.toLowerCase).map(_.runType).get
+
+ override protected def createStreamisTransformJobContent(transformJob: StreamisTransformJob): StreamisTransformJobContent =
+ jobContentParsers.find(_.canParse(transformJob.getStreamJob, transformJob.getStreamJobVersion))
+ .map(_.parseTo(transformJob.getStreamJob, transformJob.getStreamJobVersion))
+ .getOrElse(throw new JobExecuteErrorException(30350, "Not support jobContent " + transformJob.getStreamJobVersion.getJobContent))
+
+}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/entity/StreamisJobConnect.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/entity/StreamisJobConnect.scala
new file mode 100644
index 000000000..20aa5b79f
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/entity/StreamisJobConnect.scala
@@ -0,0 +1,32 @@
+package com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity
+
+import org.apache.linkis.manager.label.entity.engine.RunType.RunType
+
+trait StreamisJobConnect {
+ /**
+ * like: flink
+ *
+ * @return
+ */
+ def getRunType: RunType
+
+ /**
+ * like: 1.12.2
+ *
+ * @return
+ */
+ def getRunEngineVersion: String
+}
+
+class StreamisJobConnectImpl extends StreamisJobConnect {
+
+ private var runEngineVersion: String = _
+ private var runType: RunType = _
+
+ def setRunType(runType: RunType): Unit = this.runType = runType
+ override def getRunType: RunType = runType
+
+ override def getRunEngineVersion: String = runEngineVersion
+ def setRunEngineVersion(runEngineVersion: String): Unit = this.runEngineVersion = runEngineVersion
+
+}
\ No newline at end of file
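A hedged construction sketch for the new entity; the run type shown is illustrative (the builders derive it from the job content parser), while the version string mirrors the builder's `wds.streamis.flink.submit.version` default:

```scala
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.StreamisJobConnectImpl
import org.apache.linkis.manager.label.entity.engine.RunType

object ConnectExample {
  def flinkConnect(): StreamisJobConnectImpl = {
    val connect = new StreamisJobConnectImpl
    connect.setRunType(RunType.SQL)       // illustrative run type
    connect.setRunEngineVersion("1.12.2") // builder's default flink version
    connect
  }
}
```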
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/entity/StreamisJobEngineConn.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/entity/StreamisJobEngineConn.scala
index d9d286ebb..223b2ebec 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/entity/StreamisJobEngineConn.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/entity/StreamisJobEngineConn.scala
@@ -17,7 +17,7 @@ package com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity
import org.apache.linkis.manager.label.entity.engine.RunType._
-
+@Deprecated
trait StreamisJobEngineConn {
def getRunType: RunType
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/entity/StreamisTransformJob.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/entity/StreamisTransformJob.scala
index f4af773ff..420d65378 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/entity/StreamisTransformJob.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/entity/StreamisTransformJob.scala
@@ -26,10 +26,13 @@ trait StreamisTransformJob {
def getStreamJobVersion: StreamJobVersion
- def getConfigMap: util.Map[String, Any]
+ def getConfigMap: util.Map[String, AnyRef]
+ @deprecated
def getStreamisJobEngineConn: StreamisJobEngineConn
+ def getStreamisJobConnect: StreamisJobConnect
+
def getStreamisTransformJobContent: StreamisTransformJobContent
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/entity/StreamisTransformJobImpl.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/entity/StreamisTransformJobImpl.scala
index 7a6daa325..5e5522ddf 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/entity/StreamisTransformJobImpl.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/entity/StreamisTransformJobImpl.scala
@@ -24,8 +24,10 @@ class StreamisTransformJobImpl extends StreamisTransformJob {
private var streamJob: StreamJob = _
private var streamJobVersion: StreamJobVersion = _
- private var configMap: util.Map[String, Any] = _
+ private var configMap: util.Map[String, AnyRef] = _
+ @deprecated
private var streamisJobEngineConn: StreamisJobEngineConn = _
+ private var streamisJobConnect: StreamisJobConnect = _
private var streamisTransformJobContent: StreamisTransformJobContent = _
override def getStreamJob: StreamJob = streamJob
@@ -34,16 +36,20 @@ class StreamisTransformJobImpl extends StreamisTransformJob {
override def getStreamJobVersion: StreamJobVersion = streamJobVersion
def setStreamJobVersion(streamJobVersion: StreamJobVersion): Unit = this.streamJobVersion = streamJobVersion
+ @deprecated
override def getStreamisJobEngineConn: StreamisJobEngineConn = streamisJobEngineConn
def setStreamisJobEngineConn(streamisJobEngineConn: StreamisJobEngineConn): Unit = this.streamisJobEngineConn = streamisJobEngineConn
+ override def getStreamisJobConnect: StreamisJobConnect = streamisJobConnect
+ def setStreamisJobConnect(streamisJobConnect: StreamisJobConnect): Unit = this.streamisJobConnect = streamisJobConnect
+
override def getStreamisTransformJobContent: StreamisTransformJobContent = streamisTransformJobContent
def setStreamisTransformJobContent(streamisTransformJobContent: StreamisTransformJobContent): Unit =
this.streamisTransformJobContent = streamisTransformJobContent
- override def getConfigMap: util.Map[String, Any] =this.configMap
+ override def getConfigMap: util.Map[String, AnyRef] =this.configMap
- def setConfigMap(mapValue: util.Map[String, Any]): Unit = {
+ def setConfigMap(mapValue: util.Map[String, AnyRef]): Unit = {
this.configMap = mapValue
}
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkCheckpointConfigTransform.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkCheckpointConfigTransform.scala
index 01426f656..4347c285e 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkCheckpointConfigTransform.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkCheckpointConfigTransform.scala
@@ -21,8 +21,9 @@ import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConst
import java.util
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.LaunchJob
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.manager.JobLaunchManager
-import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.Checkpoint
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.job.state.FlinkCheckpoint
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl.FlinkCheckpointConfigTransform.CHECKPOINT_PATH_CONFIG_NAME
+import org.apache.linkis.common.conf.CommonVars
import org.apache.linkis.common.utils.Logging
import scala.collection.JavaConverters._
@@ -40,12 +41,12 @@ class FlinkCheckpointConfigTransform extends FlinkConfigTransform with Logging{
*/
override protected def configGroup(): String = JobConfKeyConstants.GROUP_PRODUCE.getValue
- override protected def transform(produceConfig: util.Map[String, Any], job: LaunchJob): LaunchJob = {
+ override protected def transform(produceConfig: util.Map[String, AnyRef], job: LaunchJob): LaunchJob = {
produceConfig.get(JobConfKeyConstants.CHECKPOINT_SWITCH.getValue) match {
case "ON" =>
- val checkpointConfig: util.Map[String, Any] = new util.HashMap[String, Any]()
+ val checkpointConfig: util.Map[String, AnyRef] = new util.HashMap[String, AnyRef]()
val jobLaunchManager = JobLaunchManager.getJobManager(JobLauncherAutoConfiguration.DEFAULT_JOB_LAUNCH_MANGER)
- val checkpointPath = jobLaunchManager.getJobStateManager.getJobStateDir(classOf[Checkpoint], job.getJobName)
+ val checkpointPath = jobLaunchManager.getJobStateManager.getJobStateDir(classOf[FlinkCheckpoint], job.getJobName)
checkpointConfig.put(FlinkConfigTransform.FLINK_CONFIG_PREFIX + CHECKPOINT_PATH_CONFIG_NAME, checkpointPath)
info(s"Use the checkpoint dir, ${CHECKPOINT_PATH_CONFIG_NAME} => ${checkpointPath}")
produceConfig.asScala.filter(_._1.startsWith(JobConfKeyConstants.CHECKPOINT.getValue))
@@ -62,5 +63,5 @@ class FlinkCheckpointConfigTransform extends FlinkConfigTransform with Logging{
}
object FlinkCheckpointConfigTransform{
- val CHECKPOINT_PATH_CONFIG_NAME = "state.checkpoints.dir"
+ private val CHECKPOINT_PATH_CONFIG_NAME = CommonVars("wds.streamis.flink.config.name.checkpoint-path", "state.checkpoints.dir").getValue
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkConfigTransform.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkConfigTransform.scala
index e44552f87..dd4868c15 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkConfigTransform.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkConfigTransform.scala
@@ -27,13 +27,13 @@ import java.util
*/
abstract class FlinkConfigTransform extends ConfigTransform {
- protected def transformConfig(getConfig: => util.Map[String, Any], job: LaunchJob): LaunchJob = {
- val startupMap = new util.HashMap[String, Any]
+ protected def transformConfig(getConfig: => util.Map[String, AnyRef], job: LaunchJob): LaunchJob = {
+ val startupMap = new util.HashMap[String, AnyRef]
Option(getConfig).foreach(configSeq => configSeq.foreach{
case (key, value) => startupMap.put(key, value)
case _ =>
})
- val params = if(job.getParams == null) new util.HashMap[String, Any] else job.getParams
+ val params = if(job.getParams == null) new util.HashMap[String, AnyRef] else job.getParams
if(!startupMap.isEmpty) TaskUtils.addStartupMap(params, JobUtils.filterParameterSpec(startupMap))
LaunchJob.builder().setLaunchJob(job).setParams(params).build()
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkExtraConfigTransform.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkExtraConfigTransform.scala
index 86c44207e..97c8caeac 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkExtraConfigTransform.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkExtraConfigTransform.scala
@@ -34,7 +34,7 @@ class FlinkExtraConfigTransform extends FlinkConfigTransform {
*/
override protected def configGroup(): String = JobConfKeyConstants.GROUP_FLINK_EXTRA.getValue
- override protected def transform(flinkExtra: util.Map[String, Any], job: LaunchJob): LaunchJob = {
+ override protected def transform(flinkExtra: util.Map[String, AnyRef], job: LaunchJob): LaunchJob = {
transformConfig(flinkExtra.asScala.map(entry =>{
(FlinkConfigTransform.FLINK_CONFIG_PREFIX + entry._1, entry._2)
}).asJava, job)
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkInternalConfigTransform.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkInternalConfigTransform.scala
new file mode 100644
index 000000000..461ce43c3
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkInternalConfigTransform.scala
@@ -0,0 +1,49 @@
+package com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl
+import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConstants
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.LaunchJob
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.conf.JobConf
+import com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl.FlinkInternalConfigTransform.INTERNAL_CONFIG_MAP
+import org.apache.linkis.common.conf.CommonVars
+
+import java.util
+import scala.collection.JavaConverters.{mapAsJavaMapConverter, mapAsScalaMapConverter}
+
+/**
+ * Flink internal config transform
+ */
+class FlinkInternalConfigTransform extends FlinkConfigTransform {
+
+ /**
+ * Config group name
+ *
+ * @return
+ */
+ override protected def configGroup(): String = JobConfKeyConstants.GROUP_INTERNAL.getValue
+
+ override protected def transform(internalConfig: util.Map[String, AnyRef], job: LaunchJob): LaunchJob = {
+ transformConfig(internalConfig.asScala.map{
+ case (key, value) =>
+ (FlinkConfigTransform.FLINK_CONFIG_PREFIX + (INTERNAL_CONFIG_MAP.get(key) match {
+ case Some(mappingKey) => mappingKey
+ case _ => key // fall back to the original key when no mapping exists
+ }), value)
+ }.asJava, job)
+ }
+}
+
+object FlinkInternalConfigTransform {
+ /**
+ * Defined in FlinkStreamisConfigDefine.LOG_GATEWAY_ADDRESS of 'flink-streamis-log-collector'
+ */
+ private val LOG_GATEWAY_CONFIG_NAME = CommonVars("wds.streamis.flink.config.name.log-gateway", "stream.log.gateway.address").getValue
+
+ /**
+ * Log collect path, defined alongside the gateway address in FlinkStreamisConfigDefine of 'flink-streamis-log-collector'
+ */
+ private val LOG_COLLECT_PATH_CONFIG_NAME = CommonVars("wds.streamis.flink.config.name.log-collect-path", "stream.log.collect.path").getValue
+
+
+ val INTERNAL_CONFIG_MAP = Map(JobConf.STREAMIS_JOB_LOG_GATEWAY.key -> LOG_GATEWAY_CONFIG_NAME,
+ JobConf.STREAMIS_JOB_LOG_COLLECT_PATH.key -> LOG_COLLECT_PATH_CONFIG_NAME
+ )
+}
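Before prefixing, the transform renames Streamis-internal keys to the config names the log collector reads, falling back to the original key when there is no mapping. A plain-Scala sketch of that step (the prefix string is a placeholder, since the value of `FlinkConfigTransform.FLINK_CONFIG_PREFIX` is not shown here):

```scala
object InternalKeyMapping {
  private val Prefix = "flink.conf." // placeholder for FlinkConfigTransform.FLINK_CONFIG_PREFIX

  // Rename known internal keys via the mapping, keep unknown keys as-is,
  // then apply the Flink config prefix.
  def rename(config: Map[String, String], mapping: Map[String, String]): Map[String, String] =
    config.map { case (key, value) => (Prefix + mapping.getOrElse(key, key), value) }
}
```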
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkJarStreamisJobContentTransform.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkJarStreamisJobContentTransform.scala
index 551ae0f98..3b50a30d2 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkJarStreamisJobContentTransform.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkJarStreamisJobContentTransform.scala
@@ -15,6 +15,7 @@
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl
+import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.{JobConfKeyConstants, JobConstants}
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.LaunchJob
import java.util
@@ -32,9 +33,9 @@ import scala.collection.mutable
* Created by enjoyyin on 2021/9/23.
*/
class FlinkJarStreamisJobContentTransform extends StreamisJobContentTransform {
- override protected def transformJobContent(transformJob: StreamisTransformJobContent): util.HashMap[String, Any] = transformJob match {
+ override protected def transformJobContent(transformJob: StreamisTransformJobContent): util.HashMap[String, AnyRef] = transformJob match {
case transformJobContent: StreamisJarTransformJobContent =>
- val jobContent = new util.HashMap[String, Any]
+ val jobContent = new util.HashMap[String, AnyRef]
jobContent.put("flink.app.args", transformJobContent.getArgs.asScala.mkString(" "))
jobContent.put("flink.app.main.class", transformJobContent.getMainClass)
jobContent
@@ -46,28 +47,39 @@ class FlinkJarStreamisStartupParamsTransform extends Transform {
override def transform(streamisTransformJob: StreamisTransformJob, job: LaunchJob): LaunchJob = streamisTransformJob.getStreamisTransformJobContent match {
case transformJobContent: StreamisJarTransformJobContent =>
- val startupMap = new util.HashMap[String, Any]
+ val startupMap = new util.HashMap[String, AnyRef]
startupMap.put("flink.app.main.class.jar", transformJobContent.getMainClassJar.getFileName)
startupMap.put("flink.app.main.class.jar.bml.json",
JsonUtils.jackson.writeValueAsString(getStreamisFileContent(transformJobContent.getMainClassJar)))
- val classpathFiles = if(transformJobContent.getDependencyJars != null && transformJobContent.getResources != null) {
- startupMap.put("flink.app.user.class.path", transformJobContent.getDependencyJars.asScala.map(_.getFileName).mkString(","))
- transformJobContent.getDependencyJars.asScala ++ transformJobContent.getResources.asScala
- } else if(transformJobContent.getDependencyJars != null) {
- startupMap.put("flink.app.user.class.path", transformJobContent.getDependencyJars.asScala.map(_.getFileName).mkString(","))
- transformJobContent.getDependencyJars.asScala
- } else if(transformJobContent.getResources != null) {
- startupMap.put("flink.yarn.ship-directories", transformJobContent.getResources.asScala.map(_.getFileName).mkString(","))
- transformJobContent.getResources.asScala
+
+ /**
+ * Notice: "flink.app.user.class.path" corresponds to PipelineOptions.CLASSPATHS in Flink;
+ * those paths must specify a protocol (e.g. file://) and be accessible on all nodes,
+ * so we use "flink.yarn.ship-directories" instead
+ */
+ var classPathFiles = Option(transformJobContent.getDependencyJars) match {
+ case Some(list) => list.asScala
+ case _ => mutable.Buffer[StreamisFile]()
+ }
+ Option(transformJobContent.getResources) match {
+ case Some(list) => classPathFiles = classPathFiles ++ list.asScala
+ case _ => // Do nothing
}
- else mutable.Buffer[StreamisFile]()
- if(classpathFiles.nonEmpty)
+ startupMap.put("flink.yarn.ship-directories", classPathFiles.map(_.getFileName).mkString(","))
+ if(classPathFiles.nonEmpty)
startupMap.put("flink.app.user.class.path.bml.json",
- JsonUtils.jackson.writeValueAsString(classpathFiles.map(getStreamisFileContent).asJava))
- if(transformJobContent.getHdfsJars != null)
+ JsonUtils.jackson.writeValueAsString(classPathFiles.map(getStreamisFileContent).asJava))
+ if(transformJobContent.getHdfsJars != null) {
startupMap.put("flink.user.lib.path", transformJobContent.getHdfsJars.asScala.mkString(","))
- val params = if(job.getParams == null) new util.HashMap[String, Any] else job.getParams
- if(!startupMap.isEmpty) TaskUtils.addStartupMap(params, JobUtils.filterParameterSpec(startupMap))
+ }
+ // Client manage mode (defaults to attach)
+ val prodConfig = streamisTransformJob.getConfigMap.get(JobConfKeyConstants.GROUP_PRODUCE.getValue).asInstanceOf[util.HashMap[String, AnyRef]]
+ startupMap.put(JobConfKeyConstants.MANAGE_MODE_KEY.getValue, prodConfig.getOrDefault(JobConfKeyConstants.MANAGE_MODE_KEY.getValue, JobConstants.MANAGE_MODE_ATTACH))
+
+ val params = if(job.getParams == null) new util.HashMap[String, AnyRef] else job.getParams
+ if (!startupMap.isEmpty) {
+ TaskUtils.addStartupMap(params, JobUtils.filterParameterSpec(startupMap))
+ }
LaunchJob.builder().setLaunchJob(job).setParams(params).build()
case _ => job
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkSavepointConfigTransform.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkSavepointConfigTransform.scala
index 0d6d03994..9465ec7b7 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkSavepointConfigTransform.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/FlinkSavepointConfigTransform.scala
@@ -1,6 +1,8 @@
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl
import com.webank.wedatasphere.streamis.jobmanager.launcher.conf.JobConfKeyConstants
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.LaunchJob
+import com.webank.wedatasphere.streamis.jobmanager.launcher.linkis.conf.JobLauncherConfiguration.VAR_FLINK_SAVEPOINT_PATH
+import com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl.FlinkSavepointConfigTransform.SAVE_POINT_PREFIX
import java.util
import scala.collection.JavaConverters._
@@ -18,11 +20,21 @@ class FlinkSavepointConfigTransform extends FlinkConfigTransform {
*/
override protected def configGroup(): String = JobConfKeyConstants.GROUP_PRODUCE.getValue
- override protected def transform(valueSet: util.Map[String, Any], job: LaunchJob): LaunchJob = {
- transformConfig(valueSet.asScala.filter(_._1.startsWith(JobConfKeyConstants.SAVEPOINT.getValue))
+ override protected def transform(valueSet: util.Map[String, AnyRef], job: LaunchJob): LaunchJob = {
+ val config: util.Map[String, AnyRef] = valueSet.asScala.filter(_._1.startsWith(JobConfKeyConstants.SAVEPOINT.getValue))
.map{
case (key, value) =>
- (FlinkConfigTransform.FLINK_CONFIG_PREFIX + key.replace(JobConfKeyConstants.SAVEPOINT.getValue, "execution.savepoint."), value)
- }.asJava, job)
+ (FlinkConfigTransform.FLINK_CONFIG_PREFIX + key.replace(JobConfKeyConstants.SAVEPOINT.getValue, SAVE_POINT_PREFIX), value)
+ }.asJava
+ Option(config.get(FlinkConfigTransform.FLINK_CONFIG_PREFIX + SAVE_POINT_PREFIX + "path")) match {
+ case Some(path) =>
+ config.put(VAR_FLINK_SAVEPOINT_PATH.getValue, path)
+ case _ =>
+ }
+ transformConfig(config, job)
}
}
+
+object FlinkSavepointConfigTransform{
+ val SAVE_POINT_PREFIX: String = "execution.savepoint."
+}
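The savepoint transform rewrites every user key under the savepoint group into an `execution.savepoint.*` engine key, then mirrors the resolved path into the launcher variable. A sketch of the key rewrite alone; the real transform additionally applies `FlinkConfigTransform.FLINK_CONFIG_PREFIX`, and the user-side prefix below is a placeholder since `JobConfKeyConstants.SAVEPOINT`'s value is not shown here:

```scala
object SavepointKeyRewrite {
  private val UserPrefix   = "streamis.savepoint."  // placeholder for JobConfKeyConstants.SAVEPOINT
  private val EnginePrefix = "execution.savepoint." // SAVE_POINT_PREFIX above

  def rewrite(config: Map[String, String]): Map[String, String] =
    config.collect {
      case (key, value) if key.startsWith(UserPrefix) =>
        (EnginePrefix + key.stripPrefix(UserPrefix), value)
    }
}
```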
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/LabelsStreamisCodeTransform.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/LabelsStreamisCodeTransform.scala
index 062eab35c..39f0c1714 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/LabelsStreamisCodeTransform.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/LabelsStreamisCodeTransform.scala
@@ -16,21 +16,25 @@
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.LaunchJob
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.conf.JobConf
import java.util
import org.apache.linkis.computation.client.utils.LabelKeyUtils
-import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.Transform
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.StreamisTransformJob
import org.apache.commons.lang.StringUtils
+import org.apache.linkis.manager.label.builder.factory.LabelBuilderFactory
import org.apache.linkis.manager.label.constant.LabelKeyConstant
class LabelsStreamisCodeTransform extends Transform {
override def transform(streamisTransformJob: StreamisTransformJob, job: LaunchJob): LaunchJob = {
- val labels = new util.HashMap[String, Any]
+ val labels = new util.HashMap[String, AnyRef]
labels.put(LabelKeyUtils.ENGINE_TYPE_LABEL_KEY, streamisTransformJob.getStreamisJobEngineConn.getEngineConnType)
+ // set engine type and version, like: flink and 1.12.2
+// labels.put(LabelKeyUtils.ENGINE_TYPE_LABEL_KEY, streamisTransformJob.getStreamisJobConnect.getRunType)
+// labels.put("LabelKeyUtils.ENGINE_TYPE_VERSION_LABEL_KEY", streamisTransformJob.getStreamisJobConnect.getRunEngineVersion)
labels.put(LabelKeyUtils.USER_CREATOR_LABEL_KEY, streamisTransformJob.getStreamJob.getSubmitUser + "-Streamis")
// Add the tenant label default
val defaultTenant: String = JobConf.STREAMIS_DEFAULT_TENANT.getValue
@@ -38,6 +42,7 @@ class LabelsStreamisCodeTransform extends Transform {
labels.put(LabelKeyConstant.TENANT_KEY, defaultTenant)
}
labels.put(LabelKeyUtils.ENGINE_CONN_MODE_LABEL_KEY, "once")
+
if (job.getLabels != null) labels.putAll(job.getLabels)
LaunchJob.builder().setLaunchJob(job).setLabels(labels).build()
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/LaunchConfigTransform.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/LaunchConfigTransform.scala
index 7c14926f0..dbd5751ac 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/LaunchConfigTransform.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/LaunchConfigTransform.scala
@@ -16,9 +16,9 @@
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.impl
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.LaunchJob
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.conf.JobConf
import java.util
-import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.Transform
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.StreamisTransformJob
import org.apache.linkis.DataWorkCloudApplication
@@ -32,10 +32,10 @@ import org.apache.linkis.DataWorkCloudApplication
class LaunchConfigTransform extends Transform {
override def transform(streamisTransformJob: StreamisTransformJob, job: LaunchJob): LaunchJob = {
- val launchConfigs = if(job.getLaunchConfigs != null) job.getLaunchConfigs else new util.HashMap[String, Any]
+ val launchConfigs = if(job.getLaunchConfigs != null) job.getLaunchConfigs else new util.HashMap[String, AnyRef]
launchConfigs.putIfAbsent(LaunchJob.LAUNCH_CONFIG_CREATE_SERVICE, DataWorkCloudApplication.getServiceInstance.toString)
launchConfigs.putIfAbsent(LaunchJob.LAUNCH_CONFIG_DESCRIPTION, streamisTransformJob.getStreamJob.getDescription)
- launchConfigs.putIfAbsent(LaunchJob.LAUNCH_CONFIG_MAX_SUBMIT_TIME, JobConf.TASK_SUBMIT_TIME_MAX.getValue.toLong)
+ launchConfigs.putIfAbsent(LaunchJob.LAUNCH_CONFIG_MAX_SUBMIT_TIME, JobConf.TASK_SUBMIT_TIME_MAX.getValue.toLong.toString)
LaunchJob.builder().setLaunchJob(job).setLaunchConfigs(launchConfigs).build()
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/ResourceConfigTransform.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/ResourceConfigTransform.scala
index fac53a166..14f972fda 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/ResourceConfigTransform.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/ResourceConfigTransform.scala
@@ -38,7 +38,7 @@ class ResourceConfigTransform extends ConfigTransform {
override protected def configGroup(): String = JobConfKeyConstants.GROUP_RESOURCE.getValue
- override protected def transform(valueSet: util.Map[String, Any], job: LaunchJob): LaunchJob = {
+ override protected def transform(valueSet: util.Map[String, AnyRef], job: LaunchJob): LaunchJob = {
val startupMap = valueSet.asScala.map{
case (key, value) =>
RESOURCE_CONFIG_MAP.get(key) match {
@@ -47,7 +47,7 @@ class ResourceConfigTransform extends ConfigTransform {
case _ => (key, value)
}
}.asJava
- val params = if(job.getParams == null) new util.HashMap[String, Any] else job.getParams
+ val params = if(job.getParams == null) new util.HashMap[String, AnyRef] else job.getParams
if(!startupMap.isEmpty) TaskUtils.addStartupMap(params, JobUtils.filterParameterSpec(startupMap))
LaunchJob.builder().setLaunchJob(job).setParams(params).build()
}
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/SourceTransform.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/SourceTransform.scala
index b1cb65a6b..8eda9295f 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/SourceTransform.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/SourceTransform.scala
@@ -24,7 +24,7 @@ import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.Stre
class SourceTransform extends Transform {
override def transform(streamisTransformJob: StreamisTransformJob, job: LaunchJob): LaunchJob = {
- val source = new util.HashMap[String, Any]
+ val source = new util.HashMap[String, AnyRef]
source.put("project", streamisTransformJob.getStreamJob.getProjectName)
source.put("workspace", streamisTransformJob.getStreamJob.getWorkspaceName)
source.put("job", streamisTransformJob.getStreamJob.getName)
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/SqlStreamisJobContentTransform.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/SqlStreamisJobContentTransform.scala
index 67576a187..b06ebbc23 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/SqlStreamisJobContentTransform.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/impl/SqlStreamisJobContentTransform.scala
@@ -22,9 +22,9 @@ import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.{Str
class SqlStreamisJobContentTransform extends StreamisJobContentTransform {
- override protected def transformJobContent(transformJob: StreamisTransformJobContent): util.HashMap[String, Any] = transformJob match {
+ override protected def transformJobContent(transformJob: StreamisTransformJobContent): util.HashMap[String, AnyRef] = transformJob match {
case sqlTransformJob: StreamisSqlTransformJobContent =>
- val jobContent = new util.HashMap[String, Any]
+ val jobContent = new util.HashMap[String, AnyRef]
jobContent.put("code", sqlTransformJob.getSql)
jobContent
case _ => null
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/AbstractJobContentParser.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/AbstractJobContentParser.scala
index f3e787877..2c89cb9da 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/AbstractJobContentParser.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/AbstractJobContentParser.scala
@@ -15,6 +15,7 @@
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.parser
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.exception.JobExecuteErrorException
import java.io.InputStream
import java.util
@@ -22,7 +23,6 @@ import org.apache.linkis.common.conf.Configuration
import org.apache.linkis.common.utils.{JsonUtils, Logging}
import com.webank.wedatasphere.streamis.jobmanager.manager.dao.StreamJobMapper
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{StreamJob, StreamJobVersion, StreamisFile}
-import com.webank.wedatasphere.streamis.jobmanager.manager.exception.JobExecuteErrorException
import com.webank.wedatasphere.streamis.jobmanager.manager.service.{BMLService, StreamiFileService}
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.JobContentParser
import org.apache.commons.io.IOUtils
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/AbstractTaskMetricsParser.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/AbstractTaskMetricsParser.scala
new file mode 100644
index 000000000..eb0de6d6e
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/AbstractTaskMetricsParser.scala
@@ -0,0 +1,37 @@
+package com.webank.wedatasphere.streamis.jobmanager.manager.transform.parser
+
+import java.util
+
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.JobDetailsVo
+import com.webank.wedatasphere.streamis.jobmanager.manager.transform.TaskMetricsParser
+import org.apache.commons.lang3.StringUtils
+import org.apache.linkis.httpclient.dws.DWSHttpClient
+
+/**
+ *
+ * @date 2022-10-21
+ * @author enjoyyin
+ * @since 0.5.0
+ */
+trait AbstractTaskMetricsParser extends TaskMetricsParser {
+
+ override def parse(metrics: String): JobDetailsVo = {
+ val jobDetailsVO = new JobDetailsVo
+ val dataNumberDTOS = new util.ArrayList[JobDetailsVo.DataNumberDTO]
+ val loadConditionDTOs = new util.ArrayList[JobDetailsVo.LoadConditionDTO]
+ val realTimeTrafficDTOS = new util.ArrayList[JobDetailsVo.RealTimeTrafficDTO]
+ jobDetailsVO.setDataNumber(dataNumberDTOS)
+ jobDetailsVO.setLoadCondition(loadConditionDTOs)
+ jobDetailsVO.setRealTimeTraffic(realTimeTrafficDTOS)
+ val metricsMap = if(StringUtils.isNotBlank(metrics)) DWSHttpClient.jacksonJson.readValue(metrics, classOf[util.Map[String, Object]])
+ else new util.HashMap[String, Object](0)
+ parse(metricsMap, dataNumberDTOS, loadConditionDTOs, realTimeTrafficDTOS)
+ jobDetailsVO
+ }
+
+ protected def parse(metricsMap: util.Map[String, Object],
+ dataNumberDTOS: util.List[JobDetailsVo.DataNumberDTO],
+ loadConditionDTOs: util.List[JobDetailsVo.LoadConditionDTO],
+ realTimeTrafficDTOS: util.List[JobDetailsVo.RealTimeTrafficDTO]): Unit
+
+}
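The new AbstractTaskMetricsParser follows the template-method pattern: the public parse(metrics: String) decodes the metrics JSON and wires the three DTO lists into a JobDetailsVo once, so each engine-specific subclass only fills the lists. A minimal sketch of a hypothetical subclass (the "demo." job type and all values are illustrative, not part of this patch):

    import java.util
    import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob
    import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.JobDetailsVo

    class DemoTaskMetricsParser extends AbstractTaskMetricsParser {

      // Claim ownership of an illustrative "demo." job type prefix.
      override def canParse(streamJob: StreamJob): Boolean = streamJob.getJobType.startsWith("demo.")

      override protected def parse(metricsMap: util.Map[String, Object],
                                   dataNumberDTOS: util.List[JobDetailsVo.DataNumberDTO],
                                   loadConditionDTOs: util.List[JobDetailsVo.LoadConditionDTO],
                                   realTimeTrafficDTOS: util.List[JobDetailsVo.RealTimeTrafficDTO]): Unit = {
        // Only the engine-specific mapping lives here; the trait has already
        // created the JobDetailsVo and attached these lists to it.
        val dataNumber = new JobDetailsVo.DataNumberDTO
        dataNumber.setDataName("demo source")
        dataNumber.setDataNumber(metricsMap.getOrDefault("records", Integer.valueOf(0)).toString.toInt)
        dataNumberDTOS.add(dataNumber)
      }
    }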
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/FlinkJarJobContentParser.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/FlinkJarJobContentParser.scala
index 5005e7701..67c5cede1 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/FlinkJarJobContentParser.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/FlinkJarJobContentParser.scala
@@ -14,13 +14,14 @@
*/
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.parser
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.exception.JobExecuteErrorException
import java.util
import org.apache.linkis.common.utils.JsonUtils
import org.apache.linkis.manager.label.entity.engine.RunType
import org.apache.linkis.manager.label.entity.engine.RunType.RunType
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{StreamJob, StreamJobVersion, StreamisFile}
-import com.webank.wedatasphere.streamis.jobmanager.manager.exception.JobExecuteErrorException
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.{StreamisJarTransformJobContent, StreamisTransformJobContent}
import org.apache.commons.lang.StringUtils
import org.springframework.stereotype.Component
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/FlinkSQLJobContentParser.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/FlinkSQLJobContentParser.scala
index 62d838683..b8d07987e 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/FlinkSQLJobContentParser.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/FlinkSQLJobContentParser.scala
@@ -15,13 +15,13 @@
package com.webank.wedatasphere.streamis.jobmanager.manager.transform.parser
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.exception.JobExecuteErrorException
import java.util
import org.apache.linkis.common.utils.JsonUtils
import org.apache.linkis.manager.label.entity.engine.RunType
import org.apache.linkis.manager.label.entity.engine.RunType.RunType
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{StreamJob, StreamJobVersion}
-import com.webank.wedatasphere.streamis.jobmanager.manager.exception.JobExecuteErrorException
import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.{StreamisSqlTransformJobContent, StreamisTransformJobContent}
import org.springframework.stereotype.Component
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/FlinkTaskMetricsParser.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/FlinkTaskMetricsParser.scala
new file mode 100644
index 000000000..48eb8ad06
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/FlinkTaskMetricsParser.scala
@@ -0,0 +1,48 @@
+package com.webank.wedatasphere.streamis.jobmanager.manager.transform.parser
+
+import java.util
+
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.JobDetailsVo
+import org.springframework.stereotype.Component
+
+/**
+ *
+ * @date 2022-10-21
+ * @author enjoyyin
+ * @since 0.5.0
+ */
+@Component
+class FlinkTaskMetricsParser extends AbstractTaskMetricsParser {
+
+ override def canParse(streamJob: StreamJob): Boolean = streamJob.getJobType.startsWith("flink.")
+
+ override def parse(metricsMap: util.Map[String, Object],
+ dataNumberDTOS: util.List[JobDetailsVo.DataNumberDTO],
+ loadConditionDTOs: util.List[JobDetailsVo.LoadConditionDTO],
+ realTimeTrafficDTOS: util.List[JobDetailsVo.RealTimeTrafficDTO]): Unit = {
+ // TODO This is just sample data for now. The full implementation is planned for a later release; all partners are welcome to join us in realizing this powerful feature.
+ val dataNumberDTO = new JobDetailsVo.DataNumberDTO
+ dataNumberDTO.setDataName("kafka topic")
+ dataNumberDTO.setDataNumber(109345)
+ dataNumberDTOS.add(dataNumberDTO)
+
+ val loadConditionDTO = new JobDetailsVo.LoadConditionDTO
+ loadConditionDTO.setType("jobManager")
+ loadConditionDTO.setHost("localhost")
+ loadConditionDTO.setMemory("1.5")
+ loadConditionDTO.setTotalMemory("2.0")
+ loadConditionDTO.setGcLastTime("2020-08-01")
+ loadConditionDTO.setGcLastConsume("1")
+ loadConditionDTO.setGcTotalTime("2min")
+ loadConditionDTOs.add(loadConditionDTO)
+
+ val realTimeTrafficDTO = new JobDetailsVo.RealTimeTrafficDTO
+ realTimeTrafficDTO.setSourceKey("kafka topic")
+ realTimeTrafficDTO.setSourceSpeed("100 Records/S")
+ realTimeTrafficDTO.setTransformKey("transform")
+ realTimeTrafficDTO.setSinkKey("hbase key")
+ realTimeTrafficDTO.setSinkSpeed("10 Records/S")
+ realTimeTrafficDTOS.add(realTimeTrafficDTO)
+ }
+}
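Since the TaskMetricsParser implementations are Spring @Component beans, a caller can collect them and dispatch on canParse. A sketch of that dispatch, assuming the caller is handed the parser list (the wiring shown here is illustrative, not the code this patch adds):

    import scala.collection.JavaConverters._
    import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob
    import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.JobDetailsVo
    import com.webank.wedatasphere.streamis.jobmanager.manager.transform.TaskMetricsParser

    // Pick the first parser that claims the job's type and let it build the VO.
    def parseMetrics(parsers: java.util.List[TaskMetricsParser],
                     job: StreamJob, metrics: String): Option[JobDetailsVo] =
      parsers.asScala.find(_.canParse(job)).map(_.parse(metrics))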
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/SparkJarJobContentParser.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/SparkJarJobContentParser.scala
new file mode 100644
index 000000000..90e124c36
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/SparkJarJobContentParser.scala
@@ -0,0 +1,78 @@
+package com.webank.wedatasphere.streamis.jobmanager.manager.transform.parser
+
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.exception.JobExecuteErrorException
+import java.util
+
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.{StreamJob, StreamJobVersion, StreamJobVersionFiles, StreamisFile}
+import com.webank.wedatasphere.streamis.jobmanager.manager.transform.entity.{StreamisJarTransformJobContent, StreamisTransformJobContent}
+import org.apache.commons.lang.StringUtils
+import org.apache.linkis.common.utils.JsonUtils
+import org.apache.linkis.manager.label.entity.engine.RunType
+import org.apache.linkis.manager.label.entity.engine.RunType.RunType
+import org.springframework.stereotype.Component
+
+import scala.collection.JavaConverters._
+
+/**
+ *
+ * @date 2022-10-19
+ * @author enjoyyin
+ * @since 0.5.0
+ */
+@Component
+class SparkJarJobContentParser extends AbstractJobContentParser {
+
+ override val jobType: String = "spark.jar"
+ override val runType: RunType = RunType.JAR
+
+ override def parseTo(job: StreamJob, jobVersion: StreamJobVersion): StreamisTransformJobContent = {
+ val createFile: String => StreamisFile = fileName => {
+ val file = new StreamJobVersionFiles()
+ file.setFileName(fileName)
+ file.setCreateBy(job.getCreateBy)
+ file.setCreateTime(job.getCreateTime)
+ file.setJobId(job.getId)
+ file.setJobVersionId(jobVersion.getId)
+ file.setVersion(jobVersion.getVersion)
+ file.setStorePath("")
+ file.setStoreType("")
+ file
+ }
+ val transformJobContent = new StreamisJarTransformJobContent
+ val jobContent = JsonUtils.jackson.readValue(jobVersion.getJobContent, classOf[util.Map[String, Object]])
+ jobContent.get("main.class.jar") match {
+ case mainClassJar: String =>
+ transformJobContent.setMainClassJar(createFile(mainClassJar))
+ case _ => throw new JobExecuteErrorException(30500, "main.class.jar is needed.")
+ }
+ jobContent.get("main.class") match {
+ case mainClass: String =>
+ transformJobContent.setMainClass(mainClass)
+ case _ => throw new JobExecuteErrorException(30500, "main.class is needed.")
+ }
+ jobContent.get("args") match {
+ case args: util.List[String] =>
+ transformJobContent.setArgs(args)
+ case _ =>
+ }
+ jobContent.get("hdfs.jars") match {
+ case hdfsJars: util.List[String] =>
+ transformJobContent.setHdfsJars(hdfsJars)
+ case _ =>
+ }
+ jobContent.get("dependency.jars") match {
+ case dependencyJars: util.List[String] =>
+ val parsedDependencyJars = dependencyJars.asScala.filter(StringUtils.isNotBlank).map(createFile).asJava
+ transformJobContent.setDependencyJars(parsedDependencyJars)
+ case _ =>
+ }
+ jobContent.get("resources") match {
+ case resources: util.List[String] =>
+ val parsedResources = resources.asScala.filter(StringUtils.isNotBlank).map(createFile).asJava
+ transformJobContent.setResources(parsedResources)
+ case _ =>
+ }
+ transformJobContent
+ }
+
+}
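For reference, parseTo above expects jobVersion.getJobContent to be a JSON object with the keys matched in the code. A shape like the following would satisfy it; only main.class.jar and main.class are mandatory, and all values here are illustrative:

    // Illustrative job content for a "spark.jar" job.
    val exampleJobContent =
      """{
        |  "main.class.jar": "demo-app.jar",
        |  "main.class": "com.example.DemoApp",
        |  "args": ["--input", "/tmp/in"],
        |  "hdfs.jars": ["hdfs:///libs/extra.jar"],
        |  "dependency.jars": ["dep-a.jar"],
        |  "resources": ["conf/app.properties"]
        |}""".stripMargin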
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/SparkTaskMetricsParser.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/SparkTaskMetricsParser.scala
new file mode 100644
index 000000000..c6e00d7ff
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/transform/parser/SparkTaskMetricsParser.scala
@@ -0,0 +1,86 @@
+package com.webank.wedatasphere.streamis.jobmanager.manager.transform.parser
+
+import java.util
+
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob
+import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.JobDetailsVo
+import org.apache.linkis.common.utils.Utils
+import org.springframework.stereotype.Component
+
+import scala.collection.JavaConverters._
+
+/**
+ *
+ * @date 2022-10-21
+ * @author enjoyyin
+ * @since 0.5.0
+ */
+@Component
+class SparkTaskMetricsParser extends AbstractTaskMetricsParser {
+
+ override protected def parse(metricsMap: util.Map[String, Object],
+ dataNumberDTOS: util.List[JobDetailsVo.DataNumberDTO],
+ loadConditionDTOs: util.List[JobDetailsVo.LoadConditionDTO],
+ realTimeTrafficDTOS: util.List[JobDetailsVo.RealTimeTrafficDTO]): Unit = {
+ val addDataNumberDTO: String => Unit = key => {
+ val batch = new JobDetailsVo.DataNumberDTO
+ batch.setDataName(key)
+ batch.setDataNumber(metricsMap.get(key) match {
+ case null => -1
+ case num => num.toString.toInt
+ })
+ dataNumberDTOS.add(batch)
+ }
+ addDataNumberDTO("waitingBatchs")
+ addDataNumberDTO("runningBatchs")
+ addDataNumberDTO("completedBatchs")
+ metricsMap.get("executors") match {
+ case executors: util.List[util.Map[String, AnyRef]] if !executors.isEmpty =>
+ executors.asScala.foreach { executor =>
+ val loadConditionDTO = new JobDetailsVo.LoadConditionDTO
+ loadConditionDTO.setType(executor.get("type").asInstanceOf[String])
+ loadConditionDTO.setHost(executor.get("host").asInstanceOf[String])
+ loadConditionDTO.setMemory(executor.get("memory").asInstanceOf[String])
+ loadConditionDTO.setTotalMemory(executor.get("totalMemory").asInstanceOf[String])
+ loadConditionDTO.setGcLastTime(executor.get("gcLastTime").asInstanceOf[String])
+ loadConditionDTO.setGcLastConsume(executor.get("gcLastConsume").asInstanceOf[String])
+ loadConditionDTO.setGcTotalTime(executor.get("gcTotalTime").asInstanceOf[String])
+ loadConditionDTOs.add(loadConditionDTO)
+ }
+ case _ =>
+ val loadConditionDTO = new JobDetailsVo.LoadConditionDTO
+ loadConditionDTO.setType("Driver")
+ loadConditionDTO.setHost("")
+ loadConditionDTO.setMemory("")
+ loadConditionDTO.setTotalMemory("")
+ loadConditionDTO.setGcLastTime("")
+ loadConditionDTO.setGcLastConsume("")
+ loadConditionDTO.setGcTotalTime("")
+ loadConditionDTOs.add(loadConditionDTO)
+ }
+ val realTimeTrafficDTO = new JobDetailsVo.RealTimeTrafficDTO
+ metricsMap.get("batchMetrics") match {
+ case batchMetrics: util.List[util.Map[String, Object]] if !batchMetrics.isEmpty =>
+ val batchMetric = batchMetrics.asScala.maxBy(_.get("batchTime").asInstanceOf[String])
+ realTimeTrafficDTO.setSourceKey(metricsMap.getOrDefault("source", "").asInstanceOf[String])
+ realTimeTrafficDTO.setSourceSpeed(batchMetric.get("inputRecords") + " Records")
+ realTimeTrafficDTO.setTransformKey("processing")
+ realTimeTrafficDTO.setSinkKey(metricsMap.getOrDefault("sink", "").asInstanceOf[String])
+ val sinkSpeed = if (batchMetric.containsKey("totalDelay") && batchMetric.get("totalDelay") != null)
+ Utils.msDurationToString(batchMetric.get("totalDelay").toString.toInt) + " totalDelay"
+ else if (batchMetric.containsKey("taskExecuteTime") && batchMetric.get("taskExecuteTime") != null)
+ Utils.msDurationToString(batchMetric.get("taskExecuteTime").toString.toInt) + " executeTime(Last Batch)"
+ else ""
+ realTimeTrafficDTO.setSinkSpeed(sinkSpeed)
+ case _ =>
+ realTimeTrafficDTO.setSourceKey("")
+ realTimeTrafficDTO.setSourceSpeed(" Records/S")
+ realTimeTrafficDTO.setTransformKey("")
+ realTimeTrafficDTO.setSinkKey("")
+ realTimeTrafficDTO.setSinkSpeed(" Records/S")
+ }
+ realTimeTrafficDTOS.add(realTimeTrafficDTO)
+ }
+
+ override def canParse(streamJob: StreamJob): Boolean = streamJob.getJobType.startsWith("spark.")
+}
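The keys read by SparkTaskMetricsParser imply a metrics payload along these lines; every value below is made up, but the key names are exactly the ones the parser dereferences above:

    // Illustrative metrics JSON for a "spark." job.
    val exampleSparkMetrics =
      """{
        |  "waitingBatchs": 1, "runningBatchs": 1, "completedBatchs": 42,
        |  "source": "kafka topic", "sink": "hbase table",
        |  "executors": [{"type": "Driver", "host": "localhost", "memory": "1.5",
        |                 "totalMemory": "2.0", "gcLastTime": "2020-08-01",
        |                 "gcLastConsume": "1", "gcTotalTime": "2min"}],
        |  "batchMetrics": [{"batchTime": "2022-10-21 10:00:00",
        |                    "inputRecords": 100, "totalDelay": 1200}]
        |}""".stripMargin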
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/utils/JobUtils.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/utils/JobUtils.scala
index 485f9c8ce..7df353459 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/utils/JobUtils.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/utils/JobUtils.scala
@@ -1,5 +1,6 @@
package com.webank.wedatasphere.streamis.jobmanager.manager.utils
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.conf.JobConf
import java.util
import scala.collection.JavaConverters.{asScalaSetConverter, mapAsScalaMapConverter}
@@ -8,11 +9,11 @@ object JobUtils {
* Filter the illegal characters in the specified parameters
* @param params parameters
*/
- def filterParameterSpec(params: util.Map[String, Any]): util.Map[String, Any] ={
+ def filterParameterSpec(params: util.Map[String, AnyRef]): util.Map[String, AnyRef] ={
for (paramEntry <- params.entrySet().asScala){
val value = paramEntry.getValue
value match {
- case str: String => paramEntry.setValue(str.replace(" ", "\\0x001"))
+ case str: String => paramEntry.setValue(str.replace(" ", JobConf.STREAMIS_JOB_PARAM_BLANK_PLACEHOLDER.getValue))
case _ =>
}
}
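The change above replaces the hard-coded "\\0x001" literal with the configurable JobConf.STREAMIS_JOB_PARAM_BLANK_PLACEHOLDER. A minimal sketch of the substitution, assuming the placeholder keeps the previous literal as its default:

    val params = new java.util.HashMap[String, AnyRef]()
    params.put("flink.app.args", "--name demo job")
    // Blanks in String values are replaced in place by the placeholder so they
    // survive transport; e.g. "--name demo job" becomes "--name\0x001demo\0x001job".
    JobUtils.filterParameterSpec(params)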
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/utils/StreamTaskUtils.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/utils/StreamTaskUtils.scala
index 6de13e67d..0fe3b55f1 100644
--- a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/utils/StreamTaskUtils.scala
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/manager/utils/StreamTaskUtils.scala
@@ -17,7 +17,7 @@
package com.webank.wedatasphere.streamis.jobmanager.manager.utils
import com.webank.wedatasphere.streamis.jobmanager.launcher.job.JobInfo
-import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.conf.JobConf
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamTask
import org.apache.commons.lang.StringUtils
import org.apache.linkis.httpclient.dws.DWSHttpClient
diff --git a/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/test/scala/com/webank/wedatasphere/streamis/jobmanager/manager/material/StreamFileContainerTest.scala b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/test/scala/com/webank/wedatasphere/streamis/jobmanager/manager/material/StreamFileContainerTest.scala
new file mode 100644
index 000000000..29988901a
--- /dev/null
+++ b/streamis-jobmanager/streamis-job-manager/streamis-job-manager-service/src/test/scala/com/webank/wedatasphere/streamis/jobmanager/manager/material/StreamFileContainerTest.scala
@@ -0,0 +1,11 @@
+package com.webank.wedatasphere.streamis.jobmanager.manager.material
+
+import org.junit.Test
+
+
+class StreamFileContainerTest {
+ @Test
+ def scanAndLoadTheFile(): Unit = {
+ print("hello")
+ }
+}
diff --git a/streamis-jobmanager/streamis-jobmanager-common/pom.xml b/streamis-jobmanager/streamis-jobmanager-common/pom.xml
index b63c59d15..cff25ea00 100644
--- a/streamis-jobmanager/streamis-jobmanager-common/pom.xml
+++ b/streamis-jobmanager/streamis-jobmanager-common/pom.xml
@@ -20,7 +20,7 @@
<artifactId>streamis-jobmanager</artifactId>
<groupId>com.webank.wedatasphere.streamis</groupId>
- <version>0.2.0</version>
+ <version>0.3.0</version>
</parent>
<modelVersion>4.0.0</modelVersion>
diff --git a/streamis-jobmanager/streamis-jobmanager-server/pom.xml b/streamis-jobmanager/streamis-jobmanager-server/pom.xml
index 11f353a55..078f77ee1 100644
--- a/streamis-jobmanager/streamis-jobmanager-server/pom.xml
+++ b/streamis-jobmanager/streamis-jobmanager-server/pom.xml
@@ -20,7 +20,7 @@
<artifactId>streamis-jobmanager</artifactId>
<groupId>com.webank.wedatasphere.streamis</groupId>
- <version>0.2.0</version>
+ <version>0.3.0</version>
</parent>
<modelVersion>4.0.0</modelVersion>
@@ -71,12 +71,16 @@
<artifactId>linkis-rpc</artifactId>
<version>${linkis.version}</version>
-
<groupId>com.webank.wedatasphere.streamis</groupId>
<artifactId>streamis-jobmanager-common</artifactId>
<version>${streamis.version}</version>
+
+
+
+
+
diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/exception/JobExceptionManager.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/exception/JobExceptionManager.java
index 8c20b0460..c5b1280d0 100644
--- a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/exception/JobExceptionManager.java
+++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/exception/JobExceptionManager.java
@@ -19,8 +19,10 @@
import java.util.Map;
public class JobExceptionManager {
+
+ private JobExceptionManager(){}
//30300-30599
- private static Map<String, String> desc = new HashMap<String, String>(32);
+ private static Map<String, String> desc = new HashMap<>(32);
static {
desc.put("30300", "upload failure(上传失败)");
desc.put("30301","%s cannot be empty!");
diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/model/BaseRequest.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/model/BaseRequest.java
new file mode 100644
index 000000000..0f41908d8
--- /dev/null
+++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/model/BaseRequest.java
@@ -0,0 +1,14 @@
+package com.webank.wedatasphere.streamis.jobmanager.model;
+
+public class BaseRequest {
+
+ private String uri;
+
+ public String getUri() {
+ return uri;
+ }
+
+ public void setUri(String uri) {
+ this.uri = uri;
+ }
+}
diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/model/ResponseWithHeaders.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/model/ResponseWithHeaders.java
new file mode 100644
index 000000000..87a4c0652
--- /dev/null
+++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/model/ResponseWithHeaders.java
@@ -0,0 +1,31 @@
+package com.webank.wedatasphere.streamis.jobmanager.model;
+
+import org.apache.http.Header;
+
+public class ResponseWithHeaders {
+
+ private String responseStr;
+
+ private Header[] headers;
+
+ public ResponseWithHeaders(String responseStr, Header[] headers) {
+ this.responseStr = responseStr;
+ this.headers = headers;
+ }
+
+ public String getResponseStr() {
+ return responseStr;
+ }
+
+ public void setResponseStr(String responseStr) {
+ this.responseStr = responseStr;
+ }
+
+ public Header[] getHeaders() {
+ return headers;
+ }
+
+ public void setHeaders(Header[] headers) {
+ this.headers = headers;
+ }
+}
diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobBulkRestfulApi.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobBulkRestfulApi.java
index 0b97ae5c8..4a9ddaa87 100644
--- a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobBulkRestfulApi.java
+++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobBulkRestfulApi.java
@@ -3,7 +3,7 @@
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob;
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.ExecResultVo;
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.vo.PauseResultVo;
-import com.webank.wedatasphere.streamis.jobmanager.manager.exception.JobExecuteErrorException;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.exception.JobExecuteErrorException;
import com.webank.wedatasphere.streamis.jobmanager.manager.project.service.ProjectPrivilegeService;
import com.webank.wedatasphere.streamis.jobmanager.manager.service.StreamJobService;
import com.webank.wedatasphere.streamis.jobmanager.manager.service.StreamTaskService;
@@ -14,7 +14,7 @@
import org.apache.commons.lang3.StringUtils;
import org.apache.linkis.scheduler.queue.SchedulerEventState;
import org.apache.linkis.server.Message;
-import org.apache.linkis.server.security.SecurityFilter;
+import org.apache.linkis.server.utils.ModuleUserUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.RequestBody;
@@ -60,7 +60,7 @@ public Message bulkExecution(@RequestBody JobBulkRequest execBulkRequest, HttpSe
}
Message result = Message.ok("success");
try{
- String username = SecurityFilter.getLoginUsername(request);
+ String username = ModuleUserUtils.getOperationUser(request, "bulk execute job");
LOG.info("Bulk execution[operator: {} sbj_type: {}, subjects: ({})]", username,
execBulkRequest.getBulkSubjectType(), StringUtils.join(execBulkRequest.getBulkSubject(), ","));
// TODO Check the permission of task id
@@ -113,7 +113,7 @@ public Message bulkPause(@RequestBody JobBulkPauseRequest pauseRequest, HttpServ
}
Message result = Message.ok("success");
try{
- String username = SecurityFilter.getLoginUsername(request);
+ String username = ModuleUserUtils.getOperationUser(request, "bulk pause job");
LOG.info("Bulk pause[operator: {}, sbj_type: {}, snapshot: {}, subjects: ({})]",
username, pauseRequest.getBulkSubjectType(), pauseRequest.isSnapshot(),
StringUtils.join(pauseRequest.getBulkSubject(), ","));
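Both bulk endpoints (bulkExecution and bulkPause) take a JSON body whose fields mirror the getters used above (getBulkSubjectType, getBulkSubject, and isSnapshot for pause). The payloads below are illustrative sketches of that shape, not documented request formats:

    // Hypothetical request bodies for the bulk execution and bulk pause endpoints.
    val bulkExecuteBody = """{"bulkSubjectType": "id", "bulkSubject": [1, 2, 3]}"""
    val bulkPauseBody = """{"bulkSubjectType": "id", "bulkSubject": [1, 2], "snapshot": true}"""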
diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobConfExtRestfulApi.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobConfExtRestfulApi.java
index 02b2da0b9..6e3c52f7e 100644
--- a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobConfExtRestfulApi.java
+++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobConfExtRestfulApi.java
@@ -17,9 +17,10 @@
import com.webank.wedatasphere.streamis.jobmanager.manager.util.CookieUtils;
import com.webank.wedatasphere.streamis.jobmanager.service.UserService;
+import com.webank.wedatasphere.streamis.jobmanager.utils.RegularUtil;
import org.apache.commons.lang.StringUtils;
import org.apache.linkis.server.Message;
-import org.apache.linkis.server.security.SecurityFilter;
+import org.apache.linkis.server.utils.ModuleUserUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@@ -40,11 +41,12 @@ public class JobConfExtRestfulApi {
@RequestMapping(path = "/getWorkspaceUsers", method = RequestMethod.GET)
public Message getWorkspaceUsers(HttpServletRequest req) {
+ String userName = ModuleUserUtils.getOperationUser(req, "get Workspace Users");
+ LOG.info(userName);
// Get the workspace
List userList = new ArrayList<>();
String workspaceId = CookieUtils.getCookieWorkspaceId(req);
- if (StringUtils.isNotBlank(workspaceId)) {
- String userName = SecurityFilter.getLoginUsername(req);
+ if (RegularUtil.matches(workspaceId)) {
userList.addAll(userService.workspaceUserQuery(req, workspaceId));
} else {
LOG.warn("Cannot find the workspaceID from DSS,perhaps the cookie value has been lost in request from: {}", req.getLocalAddr());
diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobConfRestfulApi.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobConfRestfulApi.java
index 8f8570c9b..516904b64 100644
--- a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobConfRestfulApi.java
+++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobConfRestfulApi.java
@@ -19,15 +19,15 @@
import com.webank.wedatasphere.streamis.jobmanager.launcher.entity.JobConfDefinition;
import com.webank.wedatasphere.streamis.jobmanager.launcher.entity.vo.JobConfDefinitionVo;
import com.webank.wedatasphere.streamis.jobmanager.launcher.entity.vo.JobConfValueSet;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.conf.JobConf;
import com.webank.wedatasphere.streamis.jobmanager.launcher.service.StreamJobConfService;
-import com.webank.wedatasphere.streamis.jobmanager.manager.conf.JobConf;
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamJob;
-import com.webank.wedatasphere.streamis.jobmanager.manager.exception.JobErrorException;
+import com.webank.wedatasphere.streamis.jobmanager.launcher.job.exception.JobErrorException;
import com.webank.wedatasphere.streamis.jobmanager.manager.project.service.ProjectPrivilegeService;
import com.webank.wedatasphere.streamis.jobmanager.manager.service.StreamJobService;
import org.apache.linkis.httpclient.dws.DWSHttpClient;
import org.apache.linkis.server.Message;
-import org.apache.linkis.server.security.SecurityFilter;
+import org.apache.linkis.server.utils.ModuleUserUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.*;
@@ -54,10 +54,11 @@ public class JobConfRestfulApi {
/**
* Definitions
+ *
* @return message
*/
@RequestMapping(value = "/definitions")
- public Message definitions(){
+ public Message definitions() {
Message result = Message.ok("success");
try {
List<JobConfDefinition> definitionList = this.streamJobConfService.loadAllDefinitions();
@@ -80,12 +81,12 @@ public Message definitions(){
definitionRelation.values().stream().filter(definitionVo -> definitionVo.getLevel() == 0)
.sorted((o1, o2) -> o2.getSort() - o1.getSort()).collect(Collectors.toList());
def.forEach(definitionVo -> {
- if (Objects.isNull(definitionVo.getChildDef())){
+ if (Objects.isNull(definitionVo.getChildDef())) {
definitionVo.setChildDef(Collections.emptyList());
}
});
result.data("def", def);
- }catch(Exception e){
+ } catch (Exception e) {
String message = "Fail to obtain StreamJob configuration definitions(获取任务配置定义集失败), message: " + e.getMessage();
LOG.warn(message, e);
result = Message.error(message);
@@ -95,20 +96,22 @@ public Message definitions(){
/**
* Query job config json
+ *
* @return config json
*/
@RequestMapping(value = "/json/{jobId:\\w+}", method = RequestMethod.GET)
- public Message queryConfig(@PathVariable("jobId") Long jobId, HttpServletRequest request){
+ public Message queryConfig(@PathVariable("jobId") Long jobId, HttpServletRequest request) {
Message result = Message.ok("success");
try {
- String userName = SecurityFilter.getLoginUsername(request);
+ String userName = ModuleUserUtils.getOperationUser(request, "query job config json");
StreamJob streamJob = this.streamJobService.getJobById(jobId);
- if (!streamJobService.hasPermission(streamJob, userName) &&
- !this.privilegeService.hasAccessPrivilege(request, streamJob.getProjectName())){
- throw new JobErrorException(-1, "Have no permission to view StreamJob [" + jobId + "] configuration");
+
+ if (!streamJobService.hasPermission(streamJob, userName)
+ && !this.privilegeService.hasAccessPrivilege(request, streamJob.getProjectName())) {
+ return Message.error("Have no permission to get Job details of StreamJob [" + jobId + "]");
}
result.setData(new HashMap<>(this.streamJobConfService.getJobConfig(jobId)));
- }catch(Exception e){
+ } catch (Exception e) {
String message = "Fail to view StreamJob configuration(查看任务配置失败), message: " + e.getMessage();
LOG.warn(message, e);
result = Message.error(message);
@@ -118,17 +121,18 @@ public Message queryConfig(@PathVariable("jobId") Long jobId, HttpServletRequest
/**
* Save job config json
- * @param jobId job id
+ *
+ * @param jobId job id
* @param configContent config content
- * @param request request
+ * @param request request
* @return
*/
@RequestMapping(value = "/json/{jobId:\\w+}", method = RequestMethod.POST)
public Message saveConfig(@PathVariable("jobId") Long jobId, @RequestBody Map configContent,
- HttpServletRequest request){
+ HttpServletRequest request) {
Message result = Message.ok("success");
- try{
- String userName = SecurityFilter.getLoginUsername(request);
+ try {
+ String userName = ModuleUserUtils.getOperationUser(request, "save job config json");
StreamJob streamJob = this.streamJobService.getJobById(jobId);
// Accept the developer to modify
if (!streamJobService.isCreator(jobId, userName) &&
@@ -137,30 +141,30 @@ public Message saveConfig(@PathVariable("jobId") Long jobId, @RequestBody Map configContent,
diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobRestfulApi.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/JobRestfulApi.java
private JobLaunchManager jobLaunchManager;
@@ -73,8 +91,9 @@ public Message getJobList(HttpServletRequest req,
@RequestParam(value = "projectName", required = false) String projectName,
@RequestParam(value = "jobName", required = false) String jobName,
@RequestParam(value = "jobStatus", required = false) Integer jobStatus,
- @RequestParam(value = "jobCreator", required = false) String jobCreator) throws JobException {
- String username = SecurityFilter.getLoginUsername(req);
+ @RequestParam(value = "jobCreator", required = false) String jobCreator,
+ @RequestParam(value = "label", required = false) String label) {
+ String username = ModuleUserUtils.getOperationUser(req, "list jobs");
if(StringUtils.isBlank(projectName)){
return Message.error("Project name cannot be empty(项目名不能为空,请指定)");
}
@@ -87,7 +106,7 @@ public Message getJobList(HttpServletRequest req,
PageInfo<StreamJob> pageInfo;
PageHelper.startPage(pageNow, pageSize);
try {
- pageInfo = streamJobService.getByProList(projectName, username, jobName, jobStatus, jobCreator);
+ pageInfo = streamJobService.getByProList(projectName, username, jobName, jobStatus, jobCreator, label);
} finally {
PageHelper.clearPage();
}
@@ -96,8 +115,8 @@ public Message getJobList(HttpServletRequest req,
}
@RequestMapping(path = "/createOrUpdate", method = RequestMethod.POST)
- public Message createOrUpdate(HttpServletRequest req, @Validated @RequestBody MetaJsonInfo metaJsonInfo) throws Exception {
- String username = SecurityFilter.getLoginUsername(req);
+ public Message createOrUpdate(HttpServletRequest req, @Validated @RequestBody MetaJsonInfo metaJsonInfo) {
+ String username = ModuleUserUtils.getOperationUser(req, "create or update job");
String projectName = metaJsonInfo.getProjectName();
if (StringUtils.isBlank(projectName)){
return Message.error("Project name cannot be empty(项目名不能为空,请指定)");
@@ -109,6 +128,69 @@ public Message createOrUpdate(HttpServletRequest req, @Validated @RequestBody Me
return Message.ok().data("jobId", job.getJobId());
}
+ @RequestMapping(path = "/updateLabel", method = RequestMethod.POST)
+ public Message updateLabel(HttpServletRequest req, @RequestBody BulkUpdateLabelRequest bulkUpdateLabelRequest) {
+ Message result = Message.ok("success");
+
+ String userName = ModuleUserUtils.getOperationUser(req, "update Label");
+ List<BulkUpdateLabel> tasksData = bulkUpdateLabelRequest.getTasks();
+ List<StreamJob> jobList = new ArrayList<>();
+ for (BulkUpdateLabel bulkUpdateLabel : tasksData) {
+ Long jobId = bulkUpdateLabel.getId();
+ StreamJob streamJob = this.streamJobService.getJobById(jobId);
+ if (!streamJobService.isCreator(jobId, userName) &&
+ !this.privilegeService.hasEditPrivilege(req, streamJob.getProjectName())) {
+ return Message.error("Have no permission to save StreamJob [" + jobId + "] configuration");
+ }
+ String label = bulkUpdateLabel.getLabel();
+ if (!RegularUtil.matches(label))
+ return Message.error("Fail to save StreamJob label(保存/更新标签失败), message: " + "仅支持大小写字母、数字、下划线、小数点、逗号且长度小于64位 [" + jobId + "] ");
+ StreamJob job = new StreamJob();
+ job.setLabel(label);
+ job.setId(jobId);
+ jobList.add(job);
+ }
+ for (StreamJob streamJob : jobList) {
+ streamJobService.updateLabel(streamJob);
+ }
+ return result;
+ }
+
+ @RequestMapping(path = "{jobId:\\w+}/versions", method = RequestMethod.GET)
+ public Message versions(HttpServletRequest req, @PathVariable("jobId")Integer jobId,
+ @RequestParam(value = "pageNow", required = false) Integer pageNow,
+ @RequestParam(value = "pageSize", required = false) Integer pageSize){
+ String userName = ModuleUserUtils.getOperationUser(req, "Query job version page");
+ if (Objects.isNull(pageNow)) {
+ pageNow = 1;
+ }
+ if (Objects.isNull(pageSize)){
+ pageSize = 20;
+ }
+ StreamJob streamJob = this.streamJobService.getJobById(jobId);
+ if (Objects.isNull(streamJob)){
+ return Message.error("Unknown StreamJob with id: " + jobId + "(无法找到对应的流任务)");
+ }
+ if (!streamJobService.hasPermission(streamJob, userName)
+ &&!this.privilegeService.hasAccessPrivilege(req, streamJob.getProjectName())){
+ return Message.error("Have no permission to get Job details of StreamJob [" + jobId + "]");
+
+ }
+ Message result = Message.ok();
+ PageHelper.startPage(pageNow, pageSize);
+ try{
+ PageInfo pageInfo = this.streamJobService.getVersionList(jobId);
+ if (Objects.nonNull(pageInfo)){
+ result.data("versions", pageInfo.getList());
+ result.data("totalPage", pageInfo.getTotal());
+ }
+ } catch (Exception e){
+ result = Message.error("Fail to query job version page (查看任务版本列表失败), message: " + e.getMessage());
+ } finally{
+ PageHelper.clearPage();
+ }
+ return result;
+ }
@RequestMapping(path = "/version", method = RequestMethod.GET)
public Message version(HttpServletRequest req, @RequestParam(value = "jobId", required = false) Long jobId,
@RequestParam(value = "version", required = false) String version) throws JobException {
@@ -118,26 +200,111 @@ public Message version(HttpServletRequest req, @RequestParam(value = "jobId", re
if (StringUtils.isEmpty(version)) {
throw JobExceptionManager.createException(30301, "version");
}
- String username = SecurityFilter.getLoginUsername(req);
+ String username = ModuleUserUtils.getOperationUser(req, "view the job version");
StreamJob streamJob = this.streamJobService.getJobById(jobId);
if (!streamJobService.hasPermission(streamJob, username) &&
- !this.privilegeService.hasAccessPrivilege(req, streamJob.getProjectName())) {
+ !this.privilegeService.hasAccessPrivilege(req, streamJob.getProjectName())) {
return Message.error("Have no permission to view versions of StreamJob [" + jobId + "]");
}
VersionDetailVo versionDetailVO = streamJobService.versionDetail(jobId, version);
return Message.ok().data("detail", versionDetailVO);
}
+ /**
+ * Inspect the execution
+ * @param req request
+ * @return message
+ */
+ @RequestMapping(path = "/execute/inspect", method = RequestMethod.PUT)
+ public Message executeInspect(HttpServletRequest req, @RequestParam(value = "jobId") List<Integer> jobIdList) {
+ Message result = Message.ok();
+ String userName = ModuleUserUtils.getOperationUser(req, "Inspect of execution");
+
+ for (Integer jobId : jobIdList) {
+ StreamJob streamJob = this.streamJobService.getJobById(jobId);
+ if (Objects.isNull(streamJob)){
+ return Message.error("Unknown StreamJob with id: " + jobId + "(无法找到对应的流任务)");
+ }
+ if (!streamJobService.hasPermission(streamJob, userName) &&
+ !this.privilegeService.hasEditPrivilege(req, streamJob.getProjectName())){
+ return Message.error("Have no permission to inspect the StreamJob [" + jobId + "]");
+ }
+ try {
+ HashMap<String, Object> jobConfig = new HashMap<>(this.streamJobConfService.getJobConfig(jobId));
+ HashMap<String, Object> flinkProduce = (HashMap<String, Object>) jobConfig.get(JobConfKeyConstants.GROUP_PRODUCE().getValue());
+ if (!flinkProduce.containsKey(JobConfKeyConstants.ALERT_USER().getValue())){
+ return Message.error("The StreamJob alarm recipient is not configured (未配置有效的失败告警用户) [" + jobId + "]");
+ } else {
+ String users = String.valueOf(flinkProduce.get(JobConfKeyConstants.ALERT_USER().getValue()));
+ if (users.isEmpty()){
+ return Message.error("The StreamJob alarm recipient is not configured (未配置有效的失败告警用户) [" + jobId + "]");
+ } else {
+ List<String> userList = Arrays.asList(users.split(","));
+ int i = 0;
+ for (String user : userList) {
+ if (user.toLowerCase().contains("hduser")) {
+ i++;
+ }
+ }
+ // Prevent skipping this validation by configuring multiple hduser accounts.
+ if (userList.size() == i) {
+ return Message.error("Please configure an alarm recipient other than hduser [" + jobId + "]");
+ }
+ }
+ }
+ } catch (Exception e) {
+ String message = "Fail to view StreamJob configuration(查看任务配置失败), message: " + e.getMessage();
+ LOG.warn(message, e);
+ result = Message.error(message);
+ }
+
+ // Get inspect result of the job
+ List<JobInspectVo> inspectResult = new ArrayList<>();
+ List<String> inspections = new ArrayList<>();
+ try {
+ inspectResult = this.streamJobInspectService
+ .inspect(jobId, new JobInspectVo.Types[]{JobInspectVo.Types.VERSION, JobInspectVo.Types.SNAPSHOT, JobInspectVo.Types.LIST});
+ inspections = inspectResult.stream().map(JobInspectVo::getInspectName)
+ .collect(Collectors.toList());
+ } catch (Exception e){
+ return Message.error("Fail to inspect job " + jobId + " of the execution(任务执行前检查失败), message: " + e.getMessage());
+ }
+
+ HashMap<String, Object> inspectResultMap = new HashMap<>();
+ inspectResult.forEach(inspect -> inspectResultMap.put(inspect.getInspectName(), inspect));
+ if (!inspectResultMap.containsKey("snapshot")) {
+ String value = this.streamJobConfService.getJobConfValue(jobId, JobConfKeyConstants.START_AUTO_RESTORE_SWITCH().getValue());
+ String msg;
+ if (value.equals("ON")) {
+ msg = "获取到了空的快照地址";
+ } else {
+ msg = "任务未开启快照,无需检查快照地址";
+ }
+ inspections.add("snapshot");
+ JobSnapshotInspectVo jobSnapshotInspectVo = new JobSnapshotInspectVo();
+ jobSnapshotInspectVo.setPath(msg);
+ inspectResultMap.put("snapshot", jobSnapshotInspectVo);
+ }
+ inspectResultMap.put("inspections", inspections);
+ result.setData(inspectResultMap);
+ }
+ return result;
+ }
@RequestMapping(path = "/execute", method = RequestMethod.POST)
public Message executeJob(HttpServletRequest req, @RequestBody Map<String, Object> json) throws JobException {
- String userName = SecurityFilter.getLoginUsername(req);
+ String userName = ModuleUserUtils.getOperationUser(req, "execute job");
if (!json.containsKey("jobId") || json.get("jobId") == null) {
throw JobExceptionManager.createException(30301, "jobId");
}
long jobId = Long.parseLong(json.get("jobId").toString());
LOG.info("{} try to execute job {}.", userName, jobId);
StreamJob streamJob = this.streamJobService.getJobById(jobId);
+ if(streamJob == null) {
+ return Message.error("Job " + jobId + " does not exist.");
+ } else if(!JobConf.SUPPORTED_MANAGEMENT_JOB_TYPES().getValue().contains(streamJob.getJobType())) {
+ return Message.error("Job " + streamJob.getName() + " does not support the execute operation.");
+ }
if (!streamJobService.hasPermission(streamJob, userName) &&
!this.privilegeService.hasEditPrivilege(req, streamJob.getProjectName())) {
return Message.error("Have no permission to execute StreamJob [" + jobId + "]");
@@ -155,74 +322,57 @@ public Message executeJob(HttpServletRequest req, @RequestBody Map<String, Object> json) throws JobException {
- List<JobDetailsVo.DataNumberDTO> dataNumberDTOS = new ArrayList<>();
- JobDetailsVo.DataNumberDTO dataNumberDTO = new JobDetailsVo.DataNumberDTO();
- dataNumberDTO.setDataName("kafka topic");
- dataNumberDTO.setDataNumber(109345);
- dataNumberDTOS.add(dataNumberDTO);
-
- List<JobDetailsVo.LoadConditionDTO> loadConditionDTOs = new ArrayList<>();
- JobDetailsVo.LoadConditionDTO loadConditionDTO = new JobDetailsVo.LoadConditionDTO();
- loadConditionDTO.setType("jobManager");
- loadConditionDTO.setHost("localhost");
- loadConditionDTO.setMemory("1.5");
- loadConditionDTO.setTotalMemory("2.0");
- loadConditionDTO.setGcLastTime("2020-08-01");
- loadConditionDTO.setGcLastConsume("1");
- loadConditionDTO.setGcTotalTime("2min");
- loadConditionDTOs.add(loadConditionDTO);
-
- List<JobDetailsVo.RealTimeTrafficDTO> realTimeTrafficDTOS = new ArrayList<>();
- JobDetailsVo.RealTimeTrafficDTO realTimeTrafficDTO = new JobDetailsVo.RealTimeTrafficDTO();
- realTimeTrafficDTO.setSourceKey("kafka topic");
- realTimeTrafficDTO.setSourceSpeed("100 Records/S");
- realTimeTrafficDTO.setTransformKey("transform");
- realTimeTrafficDTO.setSinkKey("hbase key");
- realTimeTrafficDTO.setSinkSpeed("10 Records/S");
- realTimeTrafficDTOS.add(realTimeTrafficDTO);
-
-
- jobDetailsVO.setLinkisJobInfo(streamTaskService.getTask(jobId,version));
- jobDetailsVO.setDataNumber(dataNumberDTOS);
- jobDetailsVO.setLoadCondition(loadConditionDTOs);
- jobDetailsVO.setRealTimeTraffic(realTimeTrafficDTOS);
-
- return Message.ok().data("details", jobDetailsVO);
+ String username = ModuleUserUtils.getOperationUser(req, "view the job details");
+ StreamJob streamJob = streamJobService.getJobById(jobId);
+ if (!streamJobService.hasPermission(streamJob, username)
+ && !this.privilegeService.hasAccessPrivilege(req, streamJob.getProjectName())){
+ return Message.error("Have no permission to get Job details of StreamJob [" + jobId + "]");
+ }
+ if(streamJob == null) {
+ return Message.error("not exists job " + jobId);
+ }
+ return Message.ok().data("details", streamTaskService.getJobDetailsVO(streamJob, version));
}
@RequestMapping(path = "/execute/history", method = RequestMethod.GET)
public Message executeHistoryJob(HttpServletRequest req,
@RequestParam(value = "jobId", required = false) Long jobId,
- @RequestParam(value = "version", required = false) String version) throws IOException, JobException {
- String username = SecurityFilter.getLoginUsername(req);
+ @RequestParam(value = "version", required = false) String version) throws JobException {
+ String username = ModuleUserUtils.getOperationUser(req, "view the job history");
if (jobId == null) {
throw JobExceptionManager.createException(30301, "jobId");
}
@@ -231,21 +381,211 @@ public Message executeHistoryJob(HttpServletRequest req,
}
StreamJob streamJob = this.streamJobService.getJobById(jobId);
if (!streamJobService.hasPermission(streamJob, username) &&
- !this.privilegeService.hasAccessPrivilege(req, streamJob.getProjectName())) {
- return Message.error("Have no permission to view execution history of StreamJob [" + jobId + "]");
+ !this.privilegeService.hasAccessPrivilege(req, streamJob.getProjectName())){
+ return Message.error("Have no permission to get Job details of StreamJob [" + jobId + "]");
}
List details = streamTaskService.queryHistory(jobId, version);
return Message.ok().data("details", details);
}
+ private Message withStreamJob(HttpServletRequest req, String projectName,
+ String jobName, String username,
+ Function<StreamJob, Message> streamJobFunction) {
+ if(StringUtils.isBlank(projectName)) {
+ return Message.error("projectName cannot be empty!");
+ } else if(StringUtils.isBlank(jobName)) {
+ return Message.error("jobName cannot be empty!");
+ }
+ List<StreamJob> streamJobs = streamJobService.getByProList(projectName, username, jobName, null, null, null).getList();
+ if(CollectionUtils.isEmpty(streamJobs)) {
+ return Message.error("Streamis job " + jobName + " does not exist.");
+ } else if(streamJobs.size() > 1) {
+ return Message.error("Too many Streamis Jobs named " + jobName + ", we cannot distinguish between them.");
+ } else if(!"spark.jar".equals(streamJobs.get(0).getJobType())) {
+ return Message.error("Only spark.jar Jobs support task management.");
+ }
+ StreamJob streamJob = streamJobService.getJobById(streamJobs.get(0).getId());
+ if (!streamJobService.hasPermission(streamJob, username) &&
+ !this.privilegeService.hasEditPrivilege(req, streamJob.getProjectName())) {
+ return Message.error("Have no permission to operate task for StreamJob [" + jobName + "].");
+ }
+ return streamJobFunction.apply(streamJob);
+ }
+
+ @RequestMapping(path = "/addTask", method = RequestMethod.GET)
+ public Message addTask(HttpServletRequest req,
+ @RequestParam(value = "projectName") String projectName,
+ @RequestParam(value = "jobName") String jobName,
+ @RequestParam(value = "appId") String appId,
+ @RequestParam(value = "appUrl") String appUrl) {
+ String username = ModuleUserUtils.getOperationUser(req, "add task");
+ LOG.info("User {} try to add a new task for Streamis job {}.{} with appId: {}, appUrl: {}.", username, projectName, jobName, appId, appUrl);
+ if(StringUtils.isBlank(appId)) {
+ return Message.error("appId cannot be empty!");
+ }
+ return withStreamJob(req, projectName, jobName, username, streamJob -> {
+ // If a running task exists, stop it first.
+ StreamTask streamTask = streamTaskService.getLatestTaskByJobId(streamJob.getId());
+ if(streamTask != null && JobConf.isRunning(streamTask.getStatus())) {
+ LOG.warn("Streamis Job {} exists running task, update its status from Running to stopped at first.", jobName);
+ streamTask.setStatus((Integer) JobConf.FLINK_JOB_STATUS_STOPPED().getValue());
+ streamTask.setErrDesc("stopped by App's new task.");
+ streamTaskService.updateTask(streamTask);
+ }
+ if(streamTask == null || StringUtils.isBlank(streamTask.getLinkisJobInfo())) {
+ // As a workaround, pick a historical Flink job that this user has permission on in this project, and reuse its jobId and jobInfo for this Spark Streaming job.
+ // Then replace the yarn info inside the JobInfo, so the frontend can display the Spark Streaming job without changing any logic.
+ PageInfo<StreamJob> jobList = streamJobService.getByProList(streamJob.getProjectName(), username, null, null, null, null);
+ List<StreamJob> copyJobs = jobList.getList().stream().filter(job -> !job.getJobType().startsWith("spark."))
+ .collect(Collectors.toList());
+ if(copyJobs.isEmpty()) {
+ return Message.error("no Flink Job has been submitted, the register to Streamis cannot be succeeded.");
+ }
+ int index = 0;
+ streamTask = null;
+ while(streamTask == null && index < copyJobs.size()) {
+ StreamTask copyTask = streamTaskService.getLatestTaskByJobId(copyJobs.get(index).getId());
+ if(copyTask == null || StringUtils.isBlank(copyTask.getLinkisJobInfo())) {
+ index ++;
+ } else {
+ LOG.warn("Streamis Job {} will bind the linkisJobInfo from history Flink Job {} with linkisJobId: {}, linkisJobInfo: {}.",
+ jobName, copyJobs.get(index).getName(), copyTask.getLinkisJobId(), copyTask.getLinkisJobInfo());
+ streamTask = streamTaskService.createTask(streamJob.getId(), (Integer) JobConf.FLINK_JOB_STATUS_RUNNING().getValue(), username);
+ streamTask.setLinkisJobId(copyTask.getLinkisJobId());
+ streamTask.setLinkisJobInfo(copyTask.getLinkisJobInfo());
+ }
+ }
+ if(streamTask == null) {
+ return Message.error("no Flink task has been executed, the register to Streamis cannot be succeeded.");
+ }
+ } else {
+ StreamTask newStreamTask = streamTaskService.createTask(streamJob.getId(), (Integer) JobConf.FLINK_JOB_STATUS_RUNNING().getValue(), username);
+ streamTask.setId(newStreamTask.getId());
+ streamTask.setVersion(newStreamTask.getVersion());
+ streamTask.setErrDesc("");
+ streamTask.setStatus(newStreamTask.getStatus());
+ streamTask.setSubmitUser(username);
+ }
+ streamTask.setStartTime(new Date());
+ streamTask.setLastUpdateTime(new Date());
+ StreamTask finalStreamTask = streamTask;
+ return withFlinkJobInfo(jobName, streamTask.getLinkisJobInfo(), flinkJobInfo -> {
+ flinkJobInfo.setApplicationId(appId);
+ flinkJobInfo.setApplicationUrl(appUrl);
+ flinkJobInfo.setName(jobName);
+ flinkJobInfo.setStatus(JobConf.getStatusString(finalStreamTask.getStatus()));
+ StreamTaskUtils.refreshInfo(finalStreamTask, flinkJobInfo);
+ streamTaskService.updateTask(finalStreamTask);
+ LOG.info("Streamis Job {} has added a new task successfully.", jobName);
+ return Message.ok();
+ });
+ });
+ }
+
+ private Message withFlinkJobInfo(String jobName, String flinkJobInfoStr, Function<EngineConnJobInfo, Message> flinkJobInfoFunction) {
+ EngineConnJobInfo flinkJobInfo;
+ try {
+ flinkJobInfo = DWSHttpClient.jacksonJson().readValue(flinkJobInfoStr, EngineConnJobInfo.class);
+ } catch (JsonProcessingException e) {
+ LOG.error("Job {} deserialize the flinkJobInfo string to object failed!", jobName, e);
+ return Message.error("Deserialize the flinkJobInfo string to object failed!");
+ }
+ return flinkJobInfoFunction.apply(flinkJobInfo);
+ }
+
+ @RequestMapping(path = "/updateTask", method = RequestMethod.GET)
+ public Message updateTask(HttpServletRequest req,
+ @RequestParam(value = "projectName") String projectName,
+ @RequestParam(value = "jobName") String jobName,
+ @RequestParam(value = "appId") String appId,
+ @RequestParam(value = "metrics") String metrics) {
+ String username = ModuleUserUtils.getOperationUser(req, "update task");
+ LOG.info("User {} try to update task for Streamis job {}.{} with appId: {}, metrics: {}.", username, projectName, jobName, appId, metrics);
+ return withStreamJob(req, projectName, jobName, username, streamJob -> {
+ StreamTask streamTask = streamTaskService.getLatestTaskByJobId(streamJob.getId());
+ if (streamTask == null) {
+ LOG.warn("Job {} is not exists running task, ignore to update its metrics.", jobName);
+ return Message.ok("not exists running task, ignore it.");
+ } else if (JobConf.isCompleted(streamTask.getStatus())) {
+ LOG.warn("The task of job {} is completed, ignore to update its metrics.", jobName);
+ return Message.ok("Task is completed, ignore to update its metrics.");
+ }
+ return withFlinkJobInfo(jobName, streamTask.getLinkisJobInfo(), flinkJobInfo -> {
+ if (!flinkJobInfo.getApplicationId().equals(appId)) {
+ LOG.warn("The running task of Job {} has appId {}, which is not equal to the request appId: {}, ignoring the metrics update.",
+ jobName, flinkJobInfo.getApplicationId(), appId);
+ return Message.ok("the request appId is not equal to the running task appId " + flinkJobInfo.getApplicationId());
+ }
+ JobStateInfo jobStateInfo = new JobStateInfo();
+ jobStateInfo.setTimestamp(System.currentTimeMillis());
+ jobStateInfo.setLocation(metrics);
+ flinkJobInfo.setJobStates(new JobStateInfo[]{jobStateInfo});
+ StreamTaskUtils.refreshInfo(streamTask, flinkJobInfo);
+ streamTaskService.updateTask(streamTask);
+ LOG.info("Streamis Job {} has updated the task metrics successfully.", jobName);
+ return Message.ok();
+ });
+ });
+ }
+
+ @RequestMapping(path = "/updateTask", method = RequestMethod.POST)
+ public Message updateTask(HttpServletRequest req,
+ @RequestBody Map<String, String> json) {
+ String projectName = json.get("projectName");
+ String jobName = json.get("jobName");
+ String appId = json.get("appId");
+ String metrics = json.get("metrics");
+ return updateTask(req, projectName, jobName, appId, metrics);
+ }
+
+ @RequestMapping(path = "/stopTask", method = RequestMethod.GET)
+ public Message stopTask(HttpServletRequest req,
+ @RequestParam(value = "projectName") String projectName,
+ @RequestParam(value = "jobName") String jobName,
+ @RequestParam(value = "appId") String appId,
+ @RequestParam(value = "appUrl") String appUrl) {
+ String username = ModuleUserUtils.getOperationUser(req, "stop task");
+ LOG.info("User {} try to stop task for Streamis job {}.{} with appId: {}, appUrl: {}.", username, projectName, jobName, appId, appUrl);
+ return withStreamJob(req, projectName, jobName, username,
+ streamJob -> tryStopTask(streamJob, appId));
+ }
+
+ private Message tryStopTask(StreamJob streamJob, String appId) {
+ // If a running task exists, stop it.
+ StreamTask streamTask = streamTaskService.getLatestTaskByJobId(streamJob.getId());
+ if(streamTask != null && JobConf.isRunning(streamTask.getStatus())) {
+ return withFlinkJobInfo(streamJob.getName(), streamTask.getLinkisJobInfo(), flinkJobInfo -> {
+ if(appId == null || flinkJobInfo.getApplicationId().equals(appId)) {
+ LOG.warn("Streamis Job {} is exists running task, update its status to stopped.", streamJob.getName());
+ streamTask.setStatus((Integer) JobConf.FLINK_JOB_STATUS_STOPPED().getValue());
+ streamTask.setErrDesc("stopped by App itself.");
+ streamTaskService.updateTask(streamTask);
+ return Message.ok();
+ } else {
+ LOG.warn("Job {} with running task is not equals to the request appId: {}, ignore to stop it.",
+ streamJob.getName(), flinkJobInfo.getApplicationId(), appId);
+ return Message.ok("the request appId is not equals to the running task appId " + flinkJobInfo.getApplicationId());
+ }
+ });
+ } else {
+ LOG.warn("Streamis Job {} is not exists running task, ignore to stop it.", streamJob.getName());
+ return Message.ok();
+ }
+ }
+
@RequestMapping(path = "/progress", method = RequestMethod.GET)
public Message progressJob(HttpServletRequest req, @RequestParam(value = "jobId", required = false) Long jobId,
- @RequestParam(value = "version", required = false) String version) throws IOException, JobException {
- String username = SecurityFilter.getLoginUsername(req);
+ @RequestParam(value = "version", required = false) String version) throws JobException {
+ String username = ModuleUserUtils.getOperationUser(req, "view the job's progress");
if (jobId == null) {
throw JobExceptionManager.createException(30301, "jobId");
}
StreamJob streamJob = this.streamJobService.getJobById(jobId);
+ if(streamJob == null) {
+ return Message.error("Job " + jobId + " does not exist.");
+ } else if(!JobConf.SUPPORTED_MANAGEMENT_JOB_TYPES().getValue().contains(streamJob.getJobType())) {
+ return Message.error("Job " + streamJob.getName() + " does not support the progress operation.");
+ }
if (!streamJobService.hasPermission(streamJob, username) &&
!this.privilegeService.hasAccessPrivilege(req, streamJob.getProjectName())) {
return Message.error("Have no permission to view the progress of StreamJob [" + jobId + "]");
@@ -257,7 +597,7 @@ public Message progressJob(HttpServletRequest req, @RequestParam(value = "jobId"
@RequestMapping(path = "/jobContent", method = RequestMethod.GET)
public Message uploadDetailsJob(HttpServletRequest req, @RequestParam(value = "jobId", required = false) Long jobId,
@RequestParam(value = "version", required = false) String version) {
- String username = SecurityFilter.getLoginUsername(req);
+ String username = ModuleUserUtils.getOperationUser(req, "view job content");
StreamJob streamJob = this.streamJobService.getJobById(jobId);
if (!streamJobService.hasPermission(streamJob, username) &&
!this.privilegeService.hasAccessPrivilege(req, streamJob.getProjectName())) {
@@ -269,9 +609,8 @@ public Message uploadDetailsJob(HttpServletRequest req, @RequestParam(value = "j
@RequestMapping(path = "/alert", method = RequestMethod.GET)
public Message getAlert(HttpServletRequest req, @RequestParam(value = "jobId", required = false) Long jobId,
- @RequestParam(value = "version", required = false) String version) {
- String username = SecurityFilter.getLoginUsername(req);
-
+ @RequestParam(value = "version", required = false) String version) {
+ String username = ModuleUserUtils.getOperationUser(req, "get alert message list");
return Message.ok().data("list", streamJobService.getAlert(username, jobId, version));
}
@@ -289,11 +628,17 @@ public Message getLog(HttpServletRequest req,
throw JobExceptionManager.createException(30301, "jobId");
}
logType = StringUtils.isBlank(logType) ? "client" : logType;
- String username = SecurityFilter.getLoginUsername(req);
+ String username = ModuleUserUtils.getOperationUser(req, "view job logs");
StreamJob streamJob = this.streamJobService.getJobById(jobId);
+ if(streamJob == null) {
+ return Message.error("not exists job " + jobId);
+ } else if(!JobConf.SUPPORTED_MANAGEMENT_JOB_TYPES().getValue().contains(streamJob.getJobType()) &&
+ "client".equals(logType)) {
+ return Message.error("Job " + streamJob.getName() + " is not supported to get client logs.");
+ }
if (!streamJobService.hasPermission(streamJob, username) &&
- !this.privilegeService.hasAccessPrivilege(req, streamJob.getProjectName())) {
- return Message.error("Have no permission to fetch logs from StreamJob [" + jobId + "]");
+ !this.privilegeService.hasAccessPrivilege(req, streamJob.getProjectName())) {
+ return Message.error("Have no permission to fetch logs of StreamJob [" + jobId + "]");
}
LogRequestPayload payload = new LogRequestPayload();
payload.setFromLine(fromLine);
@@ -334,8 +679,13 @@ public Message status(@RequestBody Map> requestMap){
public Message snapshot(@PathVariable("jobId")Long jobId, HttpServletRequest request){
Message result = Message.ok();
try{
- String username = SecurityFilter.getLoginUsername(request);
+ String username = ModuleUserUtils.getOperationUser(request, "do snapshot of job");
StreamJob streamJob = this.streamJobService.getJobById(jobId);
+ if(streamJob == null) {
+ return Message.error("not exists job " + jobId);
+ } else if(!JobConf.SUPPORTED_MANAGEMENT_JOB_TYPES().getValue().contains(streamJob.getJobType())) {
+ return Message.error("Job " + streamJob.getName() + " is not supported to do snapshot.");
+ }
if (!streamJobService.hasPermission(streamJob, username) &&
!this.privilegeService.hasEditPrivilege(request, streamJob.getProjectName())){
return Message.error("Have no permission to do snapshot for StreamJob [" + jobId + "]");
diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/ProjectRestfulApi.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/ProjectRestfulApi.java
index c85e22ced..29433ad81 100644
--- a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/ProjectRestfulApi.java
+++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/ProjectRestfulApi.java
@@ -20,7 +20,7 @@
import com.webank.wedatasphere.streamis.jobmanager.manager.service.StreamJobService;
import org.apache.commons.lang.StringUtils;
import org.apache.linkis.server.Message;
-import org.apache.linkis.server.security.SecurityFilter;
+import org.apache.linkis.server.utils.ModuleUserUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
@@ -41,7 +41,7 @@ public Message getView(HttpServletRequest req, @RequestParam(value= "projectName
if(StringUtils.isBlank(projectName)){
throw new ProjectException("params cannot be empty!");
}
- String username = SecurityFilter.getLoginUsername(req);
+ String username = ModuleUserUtils.getOperationUser(req);
TaskCoreNumVo taskCoreNumVO = streamJobService.countByCores(projectName,username);
return Message.ok().data("taskCore",taskCoreNumVO);
}
diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/UploadRestfulApi.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/UploadRestfulApi.java
index 9b800ce5f..6a285a7e8 100644
--- a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/UploadRestfulApi.java
+++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/restful/api/UploadRestfulApi.java
@@ -27,7 +27,7 @@
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.linkis.server.Message;
-import org.apache.linkis.server.security.SecurityFilter;
+import org.apache.linkis.server.utils.ModuleUserUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@@ -43,7 +43,10 @@
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
+import java.nio.file.*;
+import java.nio.file.attribute.BasicFileAttributes;
import java.util.List;
+import java.util.Objects;
@RequestMapping(path = "/streamis/streamJobManager/job")
@RestController
@@ -65,7 +68,7 @@ public Message uploadJar(HttpServletRequest request,
@RequestParam(name = "projectName", required = false) String projectName,
@RequestParam(name = "file") List files) throws IOException, JobException {
- String userName = SecurityFilter.getLoginUsername(request);
+ String userName = ModuleUserUtils.getOperationUser(request, "upload job zip file");
if (files == null || files.size() <= 0) {
throw JobExceptionManager.createException(30300, "uploaded files");
}
@@ -80,23 +83,56 @@ public Message uploadJar(HttpServletRequest request,
}
InputStream is = null;
OutputStream os = null;
- try{
- String inputPath = IoUtils.generateIOPath(userName, "streamis", fileName);
- File file = new File(inputPath);
- if(file.getParentFile().exists()){
+ File file = null;
+ String inputPath = null;
+ try {
+ inputPath = IoUtils.generateIOPath(userName, "streamis", fileName);
+ file = new File(inputPath);
+ if (file.getParentFile().exists()) {
FileUtils.deleteDirectory(file.getParentFile());
}
is = p.getInputStream();
os = IoUtils.generateExportOutputStream(inputPath);
IOUtils.copy(is, os);
StreamJobVersion job = streamJobService.uploadJob(projectName, userName, inputPath);
- return Message.ok().data("jobId",job.getJobId());
- } catch (Exception e){
+ return Message.ok().data("jobId", job.getJobId());
+ } catch (Exception e) {
LOG.error("Failed to upload zip {} to project {} for user {}.", fileName, projectName, userName, e);
return Message.error(ExceptionUtils.getRootCauseMessage(e));
- } finally{
+ } finally {
IOUtils.closeQuietly(os);
IOUtils.closeQuietly(is);
+ //Delete the temporary file
+ if (Objects.nonNull(file) && file.exists()) {
+ Path path = Paths.get(inputPath.replace("/" + fileName, ""));
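+ // Walk the extracted directory bottom-up: visitFile removes each file first,
+ // then postVisitDirectory removes every directory once it is empty.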
+ Files.walkFileTree(path,
+ new SimpleFileVisitor<Path>() {
+ // First pass: delete the files while walking the tree
+ @Override
+ public FileVisitResult visitFile(Path file,
+ BasicFileAttributes attrs) throws IOException {
+ try {
+ Files.delete(file);
+ } catch (IOException e) {
+ LOG.warn("Fail to delete the input job file, please examine the local system environment");
+ }
+ return FileVisitResult.CONTINUE;
+ }
+
+ // Second pass: delete each directory once its contents are gone
+ @Override
+ public FileVisitResult postVisitDirectory(Path dir,
+ IOException exc) throws IOException {
+ try {
+ Files.delete(dir);
+ } catch (IOException e) {
+ LOG.warn("Fail to delete the input job file, please examine the local system environment");
+ }
+ return FileVisitResult.CONTINUE;
+ }
+ }
+ );
+ }
}
}
}
diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/service/impl/UserServiceImpl.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/service/impl/UserServiceImpl.java
index 77b4fb618..022bc4ee5 100644
--- a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/service/impl/UserServiceImpl.java
+++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/service/impl/UserServiceImpl.java
@@ -41,7 +41,7 @@ public List workspaceUserQuery(HttpServletRequest req,String workspaceId
HttpHeaders headers = new HttpHeaders();
headers.setContentType(MediaType.APPLICATION_JSON);
headers.add("Cookie", req.getHeader("Cookie"));
- HttpEntity httpEntity = new HttpEntity(headers);
+ HttpEntity<String> httpEntity = new HttpEntity<>(headers);
ResponseEntity<Map> response = restTemplate.exchange(url, HttpMethod.GET, httpEntity, Map.class);
Map<String, List<Map<String, Object>>> data = (Map<String, List<Map<String, Object>>>) response.getBody().get("data");
List<Map<String, Object>> workspaceUsers = data.get("workspaceUsers");
diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/utils/RegularUtil.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/utils/RegularUtil.java
new file mode 100644
index 000000000..619aa7f2a
--- /dev/null
+++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/utils/RegularUtil.java
@@ -0,0 +1,34 @@
+package com.webank.wedatasphere.streamis.jobmanager.utils;
+
+import org.apache.commons.lang3.StringUtils;
+
+import java.util.List;
+
+public class RegularUtil {
+
+ private RegularUtil(){}
+
+ public static final String LETTER_PATTERN = "^.*[a-zA-Z]+.*$"; // contains letters
+ public static final String NUMBER_PATTERN = "^.*[0-9]+.*$"; // contains digits
+ public static final String SPECIAL_CHAR_PATTERN = "^.*[\\^$./,;:'!@#%&*|?+()\\[\\]{}]+.*$"; // contains special characters
+ public static final String PW_LENGTH_PATTERN = "^.{0,64}$"; // at most 64 characters
+ public static final String PATTERN = "^[A-Za-z0-9_,.]{1,64}$"; // letters, digits, underscore, comma and dot, 1-64 characters
+
+
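+ /** Returns true only when input is non-blank and matches PATTERN (1-64 of [A-Za-z0-9_,.]). */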
+ public static boolean matches(String input) {
+ if (StringUtils.isNotBlank(input)){
+ return input.matches(PATTERN);
+ }
+ return false;
+ }
+
+ public static boolean matches(List<String> userList) {
+ if (userList != null && !userList.isEmpty()) {
+ return userList.toString().matches(LETTER_PATTERN);
+ }
+ return false;
+ }
+
+
+
+}
diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/vo/BulkUpdateLabel.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/vo/BulkUpdateLabel.java
new file mode 100644
index 000000000..f4da31bd9
--- /dev/null
+++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/vo/BulkUpdateLabel.java
@@ -0,0 +1,23 @@
+package com.webank.wedatasphere.streamis.jobmanager.vo;
+
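+/**
+ * DTO pairing a task id with the label value to apply in a bulk label update.
+ */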
+public class BulkUpdateLabel {
+ private Long id;
+
+ private String label;
+
+ public Long getId() {
+ return id;
+ }
+
+ public void setId(Long id) {
+ this.id = id;
+ }
+
+ public String getLabel() {
+ return label;
+ }
+
+ public void setLabel(String label) {
+ this.label = label;
+ }
+}
diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/vo/BulkUpdateLabelRequest.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/vo/BulkUpdateLabelRequest.java
new file mode 100644
index 000000000..a98821d80
--- /dev/null
+++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/vo/BulkUpdateLabelRequest.java
@@ -0,0 +1,15 @@
+package com.webank.wedatasphere.streamis.jobmanager.vo;
+
+import java.util.List;
+
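+/**
+ * Request body wrapping the list of BulkUpdateLabel entries for a bulk update.
+ */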
+public class BulkUpdateLabelRequest {
+ private List<BulkUpdateLabel> tasks;
+
+ public List<BulkUpdateLabel> getTasks() {
+ return tasks;
+ }
+
+ public void setTasks(List<BulkUpdateLabel> tasks) {
+ this.tasks = tasks;
+ }
+}
diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/vo/LinkisResponse.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/vo/LinkisResponse.java
new file mode 100644
index 000000000..ba9dc1606
--- /dev/null
+++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/vo/LinkisResponse.java
@@ -0,0 +1,39 @@
+package com.webank.wedatasphere.streamis.jobmanager.vo;
+
+public class LinkisResponse {
+
+ private String method;
+ private int status;
+ private String message;
+ private LinkisResponseData data;
+ public void setMethod(String method) {
+ this.method = method;
+ }
+ public String getMethod() {
+ return method;
+ }
+
+ public void setStatus(int status) {
+ this.status = status;
+ }
+ public int getStatus() {
+ return status;
+ }
+
+ public void setMessage(String message) {
+ this.message = message;
+ }
+ public String getMessage() {
+ return message;
+ }
+
+ public void setData(LinkisResponseData data) {
+ this.data = data;
+ }
+ public LinkisResponseData getData() {
+ return data;
+ }
+
+}
+
+
diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/vo/LinkisResponseData.java b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/vo/LinkisResponseData.java
new file mode 100644
index 000000000..3cdefccba
--- /dev/null
+++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/java/com/webank/wedatasphere/streamis/jobmanager/vo/LinkisResponseData.java
@@ -0,0 +1,36 @@
+package com.webank.wedatasphere.streamis.jobmanager.vo;
+
+public class LinkisResponseData {
+
+ private String solution;
+ private boolean historyAdmin;
+ private String errorMsgTip;
+ private boolean admin;
+ public void setSolution(String solution) {
+ this.solution = solution;
+ }
+ public String getSolution() {
+ return solution;
+ }
+
+ public void setHistoryAdmin(boolean historyAdmin) {
+ this.historyAdmin = historyAdmin;
+ }
+ public boolean getHistoryAdmin() {
+ return historyAdmin;
+ }
+
+ public void setErrorMsgTip(String errorMsgTip) {
+ this.errorMsgTip = errorMsgTip;
+ }
+ public String getErrorMsgTip() {
+ return errorMsgTip;
+ }
+
+ public void setAdmin(boolean admin) {
+ this.admin = admin;
+ }
+ public boolean getAdmin() {
+ return admin;
+ }
+}
diff --git a/streamis-jobmanager/streamis-jobmanager-server/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/rpc/StreamisJobManagerReceiver.scala b/streamis-jobmanager/streamis-jobmanager-server/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/rpc/StreamisJobManagerReceiver.scala
index 89718ba85..01b6de8ef 100644
--- a/streamis-jobmanager/streamis-jobmanager-server/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/rpc/StreamisJobManagerReceiver.scala
+++ b/streamis-jobmanager/streamis-jobmanager-server/src/main/scala/com/webank/wedatasphere/streamis/jobmanager/rpc/StreamisJobManagerReceiver.scala
@@ -38,7 +38,7 @@ class StreamisJobManagerReceiver(jobService: StreamJobService) extends Receiver
override def receive(message: Any, sender: Sender): Unit = {
-
+ //nothing
}
override def receiveAndReply(message: Any, sender: Sender): Any = receiveAndReply(message, Duration(timeout, "seconds"), sender)
diff --git a/streamis-jobmanager/streamis-projectmanager-server/pom.xml b/streamis-jobmanager/streamis-projectmanager-server/pom.xml
index c5066a706..8d56a7554 100644
--- a/streamis-jobmanager/streamis-projectmanager-server/pom.xml
+++ b/streamis-jobmanager/streamis-projectmanager-server/pom.xml
@@ -20,7 +20,7 @@
        <artifactId>streamis-jobmanager</artifactId>
        <groupId>com.webank.wedatasphere.streamis</groupId>
-        <version>0.2.0</version>
+        <version>0.3.0</version>
    <modelVersion>4.0.0</modelVersion>
diff --git a/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/dao/ProjectManagerMapper.java b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/dao/ProjectManagerMapper.java
index 8eefbe92d..edec2de4e 100644
--- a/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/dao/ProjectManagerMapper.java
+++ b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/dao/ProjectManagerMapper.java
@@ -50,4 +50,6 @@ public interface ProjectManagerMapper {
List<String> selectProjectNamesByIds(List<Long> ids);
String getProjectNameById(Long id);
+
+ String getProjectNameByFileId(Long id);
}
diff --git a/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/dao/impl/ProjectManagerMapper.xml b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/dao/impl/ProjectManagerMapper.xml
index 3338203df..ae0f13b56 100644
--- a/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/dao/impl/ProjectManagerMapper.xml
+++ b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/dao/impl/ProjectManagerMapper.xml
@@ -23,12 +23,12 @@
INSERT INTO linkis_stream_project_files
- (`file_name`,`version`,`store_path`,`store_type`,`project_name`,`create_by`,`comment`)
- VALUES (#{fileName},#{version},#{storePath},#{storeType},#{projectName},#{createBy},#{comment})
+ (`file_name`,`version`,`store_path`,`store_type`,`project_name`,`create_by`,`comment`,`md5`)
+ VALUES (#{fileName},#{version},#{storePath},#{storeType},#{projectName},#{createBy},#{comment},#{MD5})
- update linkis_stream_project_files set store_path = #{storePath}
+ update linkis_stream_project_files set store_path = #{storePath},update_time = #{updateTime},md5 = #{MD5}
,version = #{version}
@@ -51,7 +51,7 @@
- SELECT f.id,f.file_name,v.version,`store_path`,`store_type`,f.project_name,`create_time`,`create_by`,f.comment FROM
+ SELECT f.id,f.file_name,v.version,`store_path`,`store_type`,f.project_name,`create_time`,`create_by`,f.comment,f.update_time,f.md5 FROM
linkis_stream_project_files f
JOIN (SELECT MAX(VERSION) VERSION,file_name,project_name FROM linkis_stream_project_files GROUP BY file_name,project_name) v
ON f.version= v.version AND f.file_name = v.file_name AND f.project_name = v.project_name
@@ -65,19 +65,19 @@
- select `id`,`file_name`,`version`,`store_path`,`store_type`,`project_name`,`create_time`,`create_by`,`comment` from
+ select `id`,`file_name`,`version`,`store_path`,`store_type`,`project_name`,`create_time`,`create_by`,`comment`,`update_time`,`md5` from
linkis_stream_project_files
where project_name = #{projectName} and file_name = #{fileName} ORDER BY version desc
- select `id`, `file_name`,`version`,`store_path`,`store_type`,`project_name`,`create_time`,`create_by` from
+ select `id`, `file_name`,`version`,`store_path`,`store_type`,`project_name`,`create_time`,`create_by`,`update_time`,`md5` from
linkis_stream_project_files where project_name = #{projectName} and version = #{version} and file_name =
#{fileName}
- select `id`,`file_name`,`version`,`store_path`,`store_type`,`project_name`,`create_time`,`create_by` from
+ select `id`,`file_name`,`version`,`store_path`,`store_type`,`project_name`,`create_time`,`create_by`,`update_time`,`md5` from
linkis_stream_project_files where id = #{id}
@@ -93,11 +93,11 @@
- select `id`,`file_name`,`version`,`store_path`,`store_type`,`project_name`,`create_time`,`create_by` from
+ select `id`,`file_name`,`version`,`store_path`,`store_type`,`project_name`,`create_time`,`create_by`,`update_time`,`md5` from
linkis_stream_project_files where id = #{id}
-
+
select distinct project_name from linkis_stream_project_files where id in
#{id}
@@ -108,4 +108,10 @@
SELECT j.project_name FROM linkis_stream_job_version_files jvf LEFT JOIN linkis_stream_job j ON jvf.job_id =j.id
WHERE jvf.id = #{id}
+
+    <select id="getProjectNameByFileId" resultType="java.lang.String">
+        SELECT jvf.project_name FROM linkis_stream_project_files jvf
+        WHERE jvf.id = #{id}
+    </select>
+
\ No newline at end of file
diff --git a/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/entity/ProjectFiles.java b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/entity/ProjectFiles.java
index 4c13a8550..278a943d4 100644
--- a/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/entity/ProjectFiles.java
+++ b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/entity/ProjectFiles.java
@@ -42,6 +42,11 @@ public class ProjectFiles implements StreamisFile {
private String createBy;
+
+ private Date updateTime;
+
+ private String MD5;
+
public String getComment() {
return comment;
}
@@ -118,4 +123,20 @@ public String getCreateBy() {
public void setCreateBy(String createBy) {
this.createBy = createBy;
}
+
+ public Date getUpdateTime() {
+ return updateTime;
+ }
+
+ public void setUpdateTime(Date updateTime) {
+ this.updateTime = updateTime;
+ }
+
+ public String getMD5() {
+ return MD5;
+ }
+
+ public void setMD5(String MD5) {
+ this.MD5 = MD5;
+ }
}
diff --git a/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/restful/api/ProjectManagerRestfulApi.java b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/restful/api/ProjectManagerRestfulApi.java
index 04d8ed405..6b59861cb 100644
--- a/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/restful/api/ProjectManagerRestfulApi.java
+++ b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/restful/api/ProjectManagerRestfulApi.java
@@ -30,7 +30,7 @@
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.linkis.server.Message;
-import org.apache.linkis.server.security.SecurityFilter;
+import org.apache.linkis.server.utils.ModuleUserUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@@ -67,15 +67,14 @@ public Message upload(HttpServletRequest req,
@RequestParam(name = "comment", required = false) String comment,
@RequestParam(name = "updateWhenExists", required = false) boolean updateWhenExists,
@RequestParam(name = "file") List files) throws UnsupportedEncodingException, FileException {
-
-
- String username = SecurityFilter.getLoginUsername(req);
+ String username = ModuleUserUtils.getOperationUser(req, "upload project files");
if (StringUtils.isBlank(version)) {
return Message.error("version is null");
}
if (StringUtils.isBlank(projectName)) {
return Message.error("projectName is null");
}
+ if (version.length() >= 30) return Message.error("version is too long, please use fewer than 30 characters");
if (!projectPrivilegeService.hasEditPrivilege(req,projectName)) return Message.error("the current user has no operation permission");
//Only uses 1st file(只取第一个文件)
@@ -136,7 +135,6 @@ public Message versionList( HttpServletRequest req, @RequestParam(value = "fileN
@RequestParam(value = "projectName",required = false) String projectName,
@RequestParam(value = "pageNow",defaultValue = "1") Integer pageNow,
@RequestParam(value = "pageSize",defaultValue = "20") Integer pageSize) {
- String username = SecurityFilter.getLoginUsername(req);
if (StringUtils.isBlank(projectName)) {
return Message.error("projectName is null");
}
@@ -159,7 +157,7 @@ public Message versionList( HttpServletRequest req, @RequestParam(value = "fileN
@RequestMapping(path = "/files/delete", method = RequestMethod.GET)
public Message delete( HttpServletRequest req, @RequestParam(value = "fileName",required = false) String fileName,
@RequestParam(value = "projectName",required = false) String projectName) {
- String username = SecurityFilter.getLoginUsername(req);
+ String username = ModuleUserUtils.getOperationUser(req, "Delete file:" + fileName + " in project: " + projectName);
if (!projectPrivilegeService.hasEditPrivilege(req,projectName)) return Message.error("the current user has no operation permission");
return projectManagerService.delete(fileName, projectName, username) ? Message.ok()
@@ -168,7 +166,7 @@ public Message delete( HttpServletRequest req, @RequestParam(value = "fileName",
@RequestMapping(path = "/files/version/delete", method = RequestMethod.GET)
public Message deleteVersion(HttpServletRequest req, @RequestParam(value = "ids",required = false) String ids) {
- String username = SecurityFilter.getLoginUsername(req);
+ String username = ModuleUserUtils.getOperationUser(req, "Delete file versions in project");
List<Long> idList = new ArrayList<>();
if (!StringUtils.isBlank(ids) && !ArrayUtils.isEmpty(ids.split(","))) {
String[] split = ids.split(",");
@@ -176,8 +174,8 @@ public Message deleteVersion(HttpServletRequest req, @RequestParam(value = "ids"
idList.add(Long.parseLong(s));
}
}
- List projectNames = projectManagerService.getProjectNames(idList);
- if (!projectPrivilegeService.hasEditPrivilege(req,projectNames)) {
+ String projectName = projectManagerService.getProjectNameByFileId(idList.get(0));
+ if (!projectPrivilegeService.hasEditPrivilege(req, projectName)) {
return Message.error("the current user has no operation permission");
}
@@ -188,6 +186,9 @@ public Message deleteVersion(HttpServletRequest req, @RequestParam(value = "ids"
@RequestMapping(path = "/files/download", method = RequestMethod.GET)
public Message download( HttpServletRequest req, HttpServletResponse response, @RequestParam(value = "id",required = false) Long id,
@RequestParam(value = "projectName",required = false)String projectName) {
+ if(StringUtils.isBlank(projectName)){
+ projectName = projectManagerService.getProjectNameByFileId(id);
+ }
ProjectFiles projectFiles = projectManagerService.getFile(id, projectName);
if (projectFiles == null) {
return Message.error("no such file in this project");
@@ -195,9 +196,6 @@ public Message download( HttpServletRequest req, HttpServletResponse response, @
if (StringUtils.isBlank(projectFiles.getStorePath())) {
return Message.error("storePath is null");
}
- if(StringUtils.isBlank(projectName)){
- projectName = projectManagerService.getProjectNameById(id);
- }
if (!projectPrivilegeService.hasEditPrivilege(req,projectName)) return Message.error("the current user has no operation permission");
response.setContentType("application/x-download");
diff --git a/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/service/ProjectManagerService.java b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/service/ProjectManagerService.java
index 9ff146678..6df103c49 100644
--- a/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/service/ProjectManagerService.java
+++ b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/service/ProjectManagerService.java
@@ -48,4 +48,6 @@ public interface ProjectManagerService {
List<String> getProjectNames(List<Long> ids);
String getProjectNameById(Long id);
+
+ String getProjectNameByFileId(Long id);
}
diff --git a/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/service/impl/ProjectManagerServiceImpl.java b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/service/impl/ProjectManagerServiceImpl.java
index ded1b101c..1c1aa4cae 100644
--- a/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/service/impl/ProjectManagerServiceImpl.java
+++ b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/service/impl/ProjectManagerServiceImpl.java
@@ -16,6 +16,7 @@
package com.webank.wedatasphere.streamis.projectmanager.service.impl;
import com.fasterxml.jackson.core.JsonProcessingException;
+import com.webank.wedatasphere.streamis.projectmanager.utils.MD5Utils;
import org.apache.commons.collections.CollectionUtils;
import org.apache.linkis.common.utils.JsonUtils;
import com.webank.wedatasphere.streamis.jobmanager.manager.entity.StreamisFile;
@@ -49,6 +50,7 @@ public class ProjectManagerServiceImpl implements ProjectManagerService, Streami
@Override
@Transactional(rollbackFor = Exception.class)
public void upload(String username, String fileName, String version, String projectName, String filePath,String comment) throws JsonProcessingException {
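+ // Compute the file's MD5 before uploading to BML, so it can be stored with the file record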
+ String fileMd5 = MD5Utils.getMD5(filePath);
Map result = bmlService.upload(username, filePath);
ProjectFiles projectFiles = new ProjectFiles();
projectFiles.setFileName(fileName);
@@ -58,12 +60,13 @@ public void upload(String username, String fileName, String version, String proj
projectFiles.setProjectName(projectName);
ReaderUtils readerUtils = new ReaderUtils();
projectFiles.setStorePath(readerUtils.readAsJson(result.get("version").toString(),result.get("resourceId").toString()));
+ projectFiles.setMD5(fileMd5);
ProjectFiles file = selectFile(fileName, version, projectName);
if (file == null) {
projectManagerMapper.insertProjectFilesInfo(projectFiles);
}else {
projectFiles.setId(file.getId());
- projectFiles.setVersion(version);
+ projectFiles.setUpdateTime(new Date());
projectManagerMapper.updateFileById(projectFiles);
}
}
@@ -128,7 +131,7 @@ public ProjectFiles selectFile(String fileName, String version, String projectNa
@Override
public List<String> getProjectNames(List<Long> ids) {
if(CollectionUtils.isEmpty(ids)){
- return null;
+ return Collections.emptyList();
}
return projectManagerMapper.selectProjectNamesByIds(ids);
}
@@ -137,4 +140,9 @@ public List getProjectNames(List ids) {
public String getProjectNameById(Long id) {
return projectManagerMapper.getProjectNameById(id);
}
+
+ @Override
+ public String getProjectNameByFileId(Long id) {
+ return projectManagerMapper.getProjectNameByFileId(id);
+ }
}
diff --git a/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/utils/DateUtils.java b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/utils/DateUtils.java
index 355d0a5da..8cd97d9b7 100644
--- a/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/utils/DateUtils.java
+++ b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/utils/DateUtils.java
@@ -18,6 +18,7 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
@@ -30,6 +31,7 @@ public class DateUtils {
private static final String FORMAT_HH_MM_SS_S = "yyyy-MM-dd HH:mm:ss.S";
private static final String FORMAT_HH_MM = "yyyy-MM-dd HH:mm";
+ private DateUtils(){}
/**
* contain T,Z format date time convert
@@ -38,10 +40,9 @@ public class DateUtils {
* @return
* @throws Exception
*/
- public static String dateTimeTZConvert(String dateTime) throws Exception {
+ public static String dateTimeTZConvert(String dateTime) throws ParseException {
Date date = new SimpleDateFormat(FORMAT_HH_T_Z).parse(dateTime);
- String time = new SimpleDateFormat(FORMAT_HH_MM_SS).format(date);
- return time;
+ return new SimpleDateFormat(FORMAT_HH_MM_SS).format(date);
}
/**
diff --git a/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/utils/MD5Utils.java b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/utils/MD5Utils.java
new file mode 100644
index 000000000..7c55011ad
--- /dev/null
+++ b/streamis-jobmanager/streamis-projectmanager-server/src/main/java/com/webank/wedatasphere/streamis/projectmanager/utils/MD5Utils.java
@@ -0,0 +1,79 @@
+package com.webank.wedatasphere.streamis.projectmanager.utils;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.*;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.Arrays;
+
+public class MD5Utils {
+ private MD5Utils(){}
+
+ private static final Logger logger = LoggerFactory.getLogger(MD5Utils.class);
+
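+ /**
+ * Returns the hex-encoded MD5 of the file at filePath; falls back to the byte-array
+ * hash code if the MD5 algorithm is unavailable, and null when the file is unreadable.
+ */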
+ public static String getMD5(String filePath) {
+ byte[] key = getBytes(filePath);
+ if (key == null) {
+ return null;
+ }
+ String cacheKey;
+ try {
+ final MessageDigest mDigest = MessageDigest.getInstance("MD5");
+ mDigest.update(key);
+ cacheKey = bytesToHexString(mDigest.digest());
+ } catch (NoSuchAlgorithmException e) {
+ cacheKey = String.valueOf(Arrays.hashCode(key));
+ }
+ return cacheKey;
+ }
+
+ private static String bytesToHexString(byte[] bytes) {
+ StringBuilder sb = new StringBuilder();
+ for (int i = 0; i < bytes.length; i++) {
+ String hex = Integer.toHexString(0xFF & bytes[i]);
+ if (hex.length() == 1) {
+ sb.append('0');
+ }
+ sb.append(hex);
+ }
+ return sb.toString();
+ }
+
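+ /** Reads the whole file into memory; returns null when the file cannot be read. */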
+ private static byte[] getBytes(String filePath){
+ byte[] buffer = null;
+ FileInputStream fis = null;
+ ByteArrayOutputStream bos = null;
+ try {
+ File file = new File(filePath);
+ fis = new FileInputStream(file);
+ bos = new ByteArrayOutputStream(1000);
+ byte[] b = new byte[1000];
+ int n;
+ while ((n = fis.read(b)) != -1) {
+ bos.write(b, 0, n);
+ }
+ buffer = bos.toByteArray();
+ } catch (IOException e) {
+ logger.error("Failed to read file bytes from {}", filePath, e);
+ } finally {
+
+ try{
+ if(fis != null){
+ fis.close();
+ }
+ }catch(Exception e){
+ logger.error("关闭输入流错误!", e);
+ }
+
+ try{
+ if(bos != null){
+ bos.close();
+ }
+ }catch(Exception e){
+ logger.error("关闭输出流错误!", e);
+ }
+ }
+ return buffer;
+ }
+
+
+}
diff --git a/streamis-project/pom.xml b/streamis-project/pom.xml
index e5bbcf888..9b1d294dc 100644
--- a/streamis-project/pom.xml
+++ b/streamis-project/pom.xml
@@ -20,7 +20,7 @@
        <artifactId>streamis</artifactId>
        <groupId>com.webank.wedatasphere.streamis</groupId>
-        <version>0.2.0</version>
+        <version>0.3.0</version>
    <modelVersion>4.0.0</modelVersion>
diff --git a/streamis-project/streamis-project-common/pom.xml b/streamis-project/streamis-project-common/pom.xml
index 849710b20..c7a3a6d45 100644
--- a/streamis-project/streamis-project-common/pom.xml
+++ b/streamis-project/streamis-project-common/pom.xml
@@ -20,7 +20,7 @@
        <artifactId>streamis-project</artifactId>
        <groupId>com.webank.wedatasphere.streamis</groupId>
-        <version>0.2.0</version>
+        <version>0.3.0</version>
    <modelVersion>4.0.0</modelVersion>
diff --git a/streamis-project/streamis-project-server/pom.xml b/streamis-project/streamis-project-server/pom.xml
index 880d5bf0b..bb07631eb 100644
--- a/streamis-project/streamis-project-server/pom.xml
+++ b/streamis-project/streamis-project-server/pom.xml
@@ -20,7 +20,7 @@
        <artifactId>streamis-project</artifactId>
        <groupId>com.webank.wedatasphere.streamis</groupId>
-        <version>0.2.0</version>
+        <version>0.3.0</version>
    <modelVersion>4.0.0</modelVersion>
@@ -68,7 +68,7 @@
            <groupId>com.webank.wedatasphere.dss</groupId>
            <artifactId>dss-sso-integration-standard</artifactId>
-            <version>1.0.1</version>
+            <version>${dss.version}</version>
            <scope>compile</scope>
diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/constant/ProjectUserPrivilegeEnum.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/constant/ProjectUserPrivilegeEnum.java
index ce371cf47..2387dfe3c 100644
--- a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/constant/ProjectUserPrivilegeEnum.java
+++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/constant/ProjectUserPrivilegeEnum.java
@@ -19,15 +19,9 @@ public int getRank() {
return rank;
}
- public void setRank(int rank) {
- this.rank = rank;
- }
public String getName() {
return name;
}
- public void setName(String name) {
- this.name = name;
- }
}
diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/dao/StreamisProjectMapper.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/dao/StreamisProjectMapper.java
index d60a10508..99dd9d85b 100644
--- a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/dao/StreamisProjectMapper.java
+++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/dao/StreamisProjectMapper.java
@@ -2,6 +2,7 @@
import com.webank.wedatasphere.streamis.project.server.entity.StreamisProject;
import org.apache.ibatis.annotations.Mapper;
+import org.apache.ibatis.annotations.Param;
import java.util.List;
@@ -23,5 +24,5 @@ public interface StreamisProjectMapper {
void updateProject(StreamisProject streamisProject);
- List<Long> findProjectIdsByNames(List<String> names);
+ List<Long> findProjectIdsByNames(@Param("names") List<String> names);
}
diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/dao/impl/projectMapper.xml b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/dao/impl/projectMapper.xml
index 8563cb82b..fa0fd52b3 100644
--- a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/dao/impl/projectMapper.xml
+++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/dao/impl/projectMapper.xml
@@ -51,7 +51,7 @@
select id from linkis_stream_project where name in
-        <foreach collection="list" item="name" open="(" close=")" separator=",">
+        <foreach collection="names" item="name" open="(" close=")" separator=",">
#{name}
diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/exception/ValidExceptionHander.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/exception/ValidExceptionHander.java
index a7ef12d18..d8397976d 100644
--- a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/exception/ValidExceptionHander.java
+++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/exception/ValidExceptionHander.java
@@ -25,7 +25,7 @@ public Message handle(MethodArgumentNotValidException e){
LOG.error("Request parameter validation exception", e);
BindingResult bindingResult = e.getBindingResult();
StringBuilder stringBuilder = new StringBuilder();
- bindingResult.getFieldErrors().forEach((item) -> stringBuilder.append(item.getDefaultMessage()).append(";"));
+ bindingResult.getFieldErrors().forEach(item -> stringBuilder.append(item.getDefaultMessage()).append(";"));
return Message.error("failed to validate request parameter, detail:"+stringBuilder.toString());
}
diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/restful/StreamisProjectPrivilegeRestfulApi.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/restful/StreamisProjectPrivilegeRestfulApi.java
index 9bb11542c..a8e4ef3b6 100644
--- a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/restful/StreamisProjectPrivilegeRestfulApi.java
+++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/restful/StreamisProjectPrivilegeRestfulApi.java
@@ -7,7 +7,7 @@
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.commons.math3.util.Pair;
import org.apache.linkis.server.Message;
-import org.apache.linkis.server.security.SecurityFilter;
+import org.apache.linkis.server.utils.ModuleUserUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@@ -36,7 +36,7 @@ public class StreamisProjectPrivilegeRestfulApi {
@RequestMapping(path = "/getProjectPrivilege", method = RequestMethod.GET)
public Message getProjectPrivilege(HttpServletRequest request, @RequestParam(value = "projectId", required = false) Long projectId,
@RequestParam(value = "projectName", required = false) String projectName) {
- String username = SecurityFilter.getLoginUsername(request);
+ String username = ModuleUserUtils.getOperationUser(request);
LOGGER.info("user {} obtain project[id:{} name:{}] privilege",username,projectId,projectName);
try {
if(projectId==null || projectId == 0) {
@@ -55,7 +55,7 @@ public Message getProjectPrivilege(HttpServletRequest request, @RequestParam(val
@RequestMapping(path = "/hasReleasePrivilege", method = RequestMethod.GET)
public Message hasReleaseProjectPrivilege(HttpServletRequest request, @RequestParam(value = "projectId", required = false) Long projectId,
@RequestParam(value = "projectName", required = false) String projectName) {
- String username = SecurityFilter.getLoginUsername(request);
+ String username = ModuleUserUtils.getOperationUser(request);
LOGGER.info("user {} obtain project[id:{} name:{}] release privilege",username,projectId,projectName);
try {
if(projectId==null || projectId == 0) {
@@ -74,7 +74,7 @@ public Message hasReleaseProjectPrivilege(HttpServletRequest request, @RequestPa
@RequestMapping(path = "/hasEditPrivilege", method = RequestMethod.GET)
public Message hasEditProjectPrivilege(HttpServletRequest request, @RequestParam(value = "projectId", required = false) Long projectId,
@RequestParam(value = "projectName", required = false) String projectName) {
- String username = SecurityFilter.getLoginUsername(request);
+ String username = ModuleUserUtils.getOperationUser(request);
LOGGER.info("user {} obtain project[id:{} name:{}] edit privilege",username,projectId,projectName);
try {
if(projectId==null || projectId == 0) {
@@ -93,7 +93,7 @@ public Message hasEditProjectPrivilege(HttpServletRequest request, @RequestParam
@RequestMapping(path = "/hasAccessPrivilege", method = RequestMethod.GET)
public Message hasAccessProjectPrivilege(HttpServletRequest request, @RequestParam(value = "projectId", required = false) Long projectId,
@RequestParam(value = "projectName", required = false) String projectName) {
- String username = SecurityFilter.getLoginUsername(request);
+ String username = ModuleUserUtils.getOperationUser(request);
LOGGER.info("user {} obtain project[id:{} name:{}] access privilege",username,projectId,projectName);
try {
if(projectId==null || projectId == 0) {
@@ -112,7 +112,7 @@ public Message hasAccessProjectPrivilege(HttpServletRequest request, @RequestPar
@RequestMapping(path = "/bulk/hasReleasePrivilege", method = RequestMethod.GET)
public Message hasReleaseProjectPrivilege(HttpServletRequest request, @RequestParam(value = "projectIds", required = false) List projectIds,
@RequestParam(value = "projectNames", required = false) List projectNames) {
- String username = SecurityFilter.getLoginUsername(request);
+ String username = ModuleUserUtils.getOperationUser(request);
LOGGER.info("user {} obtain bulk project[id:{} name:{}] release privilege",username,projectIds,projectNames);
try {
projectIds = Optional.ofNullable(projectIds).orElse(new ArrayList<>());
@@ -133,7 +133,7 @@ public Message hasReleaseProjectPrivilege(HttpServletRequest request, @RequestPa
@RequestMapping(path = "/bulk/hasEditPrivilege", method = RequestMethod.GET)
public Message hasEditProjectPrivilege(HttpServletRequest request, @RequestParam(value = "projectIds", required = false) List projectIds,
@RequestParam(value = "projectNames", required = false) List projectNames) {
- String username = SecurityFilter.getLoginUsername(request);
+ String username = ModuleUserUtils.getOperationUser(request);
LOGGER.info("user {} obtain bulk project[id:{} name:{}] edit privilege",username,projectIds,projectNames);
try {
projectIds = Optional.ofNullable(projectIds).orElse(new ArrayList<>());
@@ -154,7 +154,7 @@ public Message hasEditProjectPrivilege(HttpServletRequest request, @RequestParam
@RequestMapping(path = "/bulk/hasAccessPrivilege", method = RequestMethod.GET)
public Message hasAccessProjectPrivilege(HttpServletRequest request, @RequestParam(value = "projectIds", required = false) List projectIds,
@RequestParam(value = "projectNames", required = false) List projectNames) {
- String username = SecurityFilter.getLoginUsername(request);
+ String username = ModuleUserUtils.getOperationUser(request);
LOGGER.info("user {} obtain bulk project[id:{} name:{}] access privilege",username,projectIds,projectNames);
try {
projectIds = Optional.ofNullable(projectIds).orElse(new ArrayList<>());
diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/restful/StreamisProjectRestfulApi.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/restful/StreamisProjectRestfulApi.java
index 8f5ba414f..90e390514 100644
--- a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/restful/StreamisProjectRestfulApi.java
+++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/restful/StreamisProjectRestfulApi.java
@@ -11,7 +11,7 @@
import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.commons.math3.util.Pair;
import org.apache.linkis.server.Message;
-import org.apache.linkis.server.security.SecurityFilter;
+import org.apache.linkis.server.utils.ModuleUserUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@@ -41,7 +41,7 @@ public class StreamisProjectRestfulApi {
@RequestMapping(path = "/createProject", method = RequestMethod.POST)
public Message createProject( HttpServletRequest request,@Validated @RequestBody CreateProjectRequest createProjectRequest){
LOGGER.info("enter createProject, requestBody is {}",createProjectRequest.toString());
- String username = SecurityFilter.getLoginUsername(request);
+ String username = ModuleUserUtils.getOperationUser(request, "create project");
try{
StreamisProject streamisProject = new StreamisProject(createProjectRequest.getProjectName(), createProjectRequest.getWorkspaceId());
streamisProject.setCreateBy(username);
@@ -62,7 +62,7 @@ public Message createProject( HttpServletRequest request,@Validated @RequestBody
@RequestMapping(path = "/updateProject", method = RequestMethod.PUT)
public Message updateProject( HttpServletRequest request, @Validated @RequestBody UpdateProjectRequest updateProjectRequest){
LOGGER.info("enter updateProject, requestBody is {}",updateProjectRequest.toString());
- String username = SecurityFilter.getLoginUsername(request);
+ String username = ModuleUserUtils.getOperationUser(request, "update project");
try{
StreamisProject streamisProject = new StreamisProject(updateProjectRequest.getProjectName(), updateProjectRequest.getWorkspaceId());
streamisProject.setId(updateProjectRequest.getProjectId());
@@ -83,7 +83,7 @@ public Message updateProject( HttpServletRequest request, @Validated @RequestBod
@RequestMapping(path = "/deleteProject", method = RequestMethod.DELETE)
public Message deleteProject( HttpServletRequest request, @RequestParam(value = "projectId", required = false) Long projectId){
LOGGER.info("enter deleteProject, requestParam projectId is {}",projectId);
- String username = SecurityFilter.getLoginUsername(request);
+ String username = ModuleUserUtils.getOperationUser(request, "delete project");
try{
projectService.deleteProjectById(projectId);
return StreamisProjectRestfulUtils.dealOk("delete project success");
@@ -96,7 +96,7 @@ public Message deleteProject( HttpServletRequest request, @RequestParam(value =
@RequestMapping(path = "/searchProject", method = RequestMethod.GET)
public Message searchProject( HttpServletRequest request,@RequestParam(value = "projectName", required = false) String projectName){
LOGGER.info("enter searchProject, requestParam projectName is {}",projectName);
- String username = SecurityFilter.getLoginUsername(request);
+ String username = ModuleUserUtils.getOperationUser(request, "search project");
try{
List<Long> projectIds = projectService.queryProjectIds(projectName);
return StreamisProjectRestfulUtils.dealOk("search project success",
diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/service/impl/StreamisProjectPrivilegeServiceImpl.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/service/impl/StreamisProjectPrivilegeServiceImpl.java
index cd0e4a940..52fd1f737 100644
--- a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/service/impl/StreamisProjectPrivilegeServiceImpl.java
+++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/service/impl/StreamisProjectPrivilegeServiceImpl.java
@@ -149,7 +149,7 @@ public boolean hasAccessProjectPrivilege(List projectIds, String username)
.filter(privilege -> username!=null && username.equals(privilege.getUserName())
&& (ProjectUserPrivilegeEnum.RELEASE.getRank() == privilege.getPrivilege()
|| ProjectUserPrivilegeEnum.EDIT.getRank() == privilege.getPrivilege()
- || ProjectUserPrivilegeEnum.EDIT.getRank() == privilege.getPrivilege()))
+ || ProjectUserPrivilegeEnum.ACCESS.getRank() == privilege.getPrivilege()))
.collect(Collectors.toList());
return CollectionUtils.isNotEmpty(privilegeList);
}
diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/utils/StreamisProjectPrivilegeUtils.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/utils/StreamisProjectPrivilegeUtils.java
index 0fbfc5c79..2df77837d 100644
--- a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/utils/StreamisProjectPrivilegeUtils.java
+++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/utils/StreamisProjectPrivilegeUtils.java
@@ -7,6 +7,11 @@
import java.util.List;
public class StreamisProjectPrivilegeUtils {
+
+
+ private StreamisProjectPrivilegeUtils(){}
+
+
public static List<StreamisProjectPrivilege> createStreamisProjectPrivilege(Long projectId, List<String> users, int privilege){
List<StreamisProjectPrivilege> retList = new ArrayList<>();
if(CollectionUtils.isEmpty(users)){
diff --git a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/utils/StreamisProjectRestfulUtils.java b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/utils/StreamisProjectRestfulUtils.java
index 050deb6f8..e24c8875e 100644
--- a/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/utils/StreamisProjectRestfulUtils.java
+++ b/streamis-project/streamis-project-server/src/main/java/com/webank/wedatasphere/streamis/project/server/utils/StreamisProjectRestfulUtils.java
@@ -18,7 +18,7 @@ public static Message dealOk(String msg){
return Message.ok(msg);
}
-
+ private StreamisProjectRestfulUtils() {}
@SafeVarargs
public static Message dealOk(String msg, Pair... data){
diff --git a/streamis-project/streamis-project-server/src/main/scala/com/webank/wedatasphere/streamis/project/server/receiver/StreamisProjectServerReceiver.scala b/streamis-project/streamis-project-server/src/main/scala/com/webank/wedatasphere/streamis/project/server/receiver/StreamisProjectServerReceiver.scala
index b74d9631b..5a2969c59 100644
--- a/streamis-project/streamis-project-server/src/main/scala/com/webank/wedatasphere/streamis/project/server/receiver/StreamisProjectServerReceiver.scala
+++ b/streamis-project/streamis-project-server/src/main/scala/com/webank/wedatasphere/streamis/project/server/receiver/StreamisProjectServerReceiver.scala
@@ -20,38 +20,13 @@ class StreamisProjectServerReceiver(projectService:StreamisProjectService) exten
override def receive(message: Any, sender: Sender): Unit = {
-
+ //nothing
}
override def receiveAndReply(message: Any, sender: Sender): Any = receiveAndReply(message, Duration.create(300, "seconds"), sender)
override def receiveAndReply(message: Any, duration: Duration, sender: Sender): Any = null
-// message match {
-// case createStreamProjectRequest: CreateStreamProjectRequest =>
-// Utils.tryCatch{
-// val streamisProject = projectService.createProject(createStreamProjectRequest)
-// CreateStreamProjectResponse(0, streamisProject.getName, streamisProject.getId, "")
-// }{
-// t => logger.error("failed to create project in streamis", t)
-// CreateStreamProjectResponse(-1, createStreamProjectRequest.projectName, -1, t.getCause.getMessage)
-// }
-// case updateStreamProjectRequest: UpdateStreamProjectRequest => Utils.tryCatch{
-// projectService.updateProject(updateStreamProjectRequest)
-// UpdateStreamProjectResponse(0, updateStreamProjectRequest.streamisProjectId, "")
-// }{
-// t => logger.error(s"failed to update project ${updateStreamProjectRequest.projectName} in streamis",t)
-// UpdateStreamProjectResponse(-1, updateStreamProjectRequest.streamisProjectId, t.getCause.getMessage)
-// }
-//
-// case deleteStreamProjectRequest: DeleteStreamProjectRequest => Utils.tryCatch{
-// projectService.deleteProject(deleteStreamProjectRequest)
-// DeleteStreamProjectResponse(0, deleteStreamProjectRequest.projectName, "")
-// }{
-// t => logger.error(s"failed to update project ${deleteStreamProjectRequest.projectName} in streamis",t)
-// DeleteStreamProjectResponse(-1, deleteStreamProjectRequest.projectName, t.getCause.getMessage)
-// }
-// case _ =>
-// }
+
diff --git a/streamis-server/bin/start-streamis-server.sh b/streamis-server/bin/start-streamis-server.sh
index f440e0f40..b05ded7a5 100644
--- a/streamis-server/bin/start-streamis-server.sh
+++ b/streamis-server/bin/start-streamis-server.sh
@@ -4,7 +4,7 @@ cd `dirname $0`
cd ..
HOME=`pwd`
export STREAMIS_HOME=$HOME
-
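+# Allow overriding the conf directory via STREAMIS_CONF_DIR; defaults to $STREAMIS_HOME/conf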
+export STREAMIS_CONF=${STREAMIS_CONF_DIR:-"${STREAMIS_HOME}/conf"}
export STREAMIS_PID=$HOME/bin/linkis.pid
if [[ -f "${STREAMIS_PID}" ]]; then
@@ -17,9 +17,9 @@ fi
export STREAMIS_LOG_PATH=$HOME/logs
export STREAMIS_HEAP_SIZE="1G"
-export STREAMIS_JAVA_OPTS="-Xms$STREAMIS_HEAP_SIZE -Xmx$STREAMIS_HEAP_SIZE -XX:+UseG1GC -XX:MaxPermSize=500m -agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=11729"
+export STREAMIS_JAVA_OPTS="-Xms$STREAMIS_HEAP_SIZE -Xmx$STREAMIS_HEAP_SIZE -XX:+UseG1GC -XX:MaxPermSize=500m -Xloggc:$STREAMIS_LOG_PATH/streamis-gc.log -XX:+PrintGCDateStamps "
-nohup java $STREAMIS_JAVA_OPTS -cp $HOME/conf:$HOME/lib/* org.apache.linkis.DataWorkCloudApplication 2>&1 > $STREAMIS_LOG_PATH/streamis.out &
+nohup java $STREAMIS_JAVA_OPTS -cp $STREAMIS_CONF:$HOME/lib/* org.apache.linkis.DataWorkCloudApplication 2>&1 > $STREAMIS_LOG_PATH/streamis.out &
pid=$!
if [[ -z "${pid}" ]]; then
echo "Streamis Server start failed!"
diff --git a/streamis-server/pom.xml b/streamis-server/pom.xml
index 701457496..a17aa1c6c 100644
--- a/streamis-server/pom.xml
+++ b/streamis-server/pom.xml
@@ -20,7 +20,7 @@
        <artifactId>streamis</artifactId>
        <groupId>com.webank.wedatasphere.streamis</groupId>
-        <version>0.2.0</version>
+        <version>0.3.0</version>
    <modelVersion>4.0.0</modelVersion>
@@ -54,6 +54,24 @@
            <groupId>com.webank.wedatasphere.streamis</groupId>
            <artifactId>streamis-jobmanager-server</artifactId>
            <version>${streamis.version}</version>
+            <exclusions>
+                <exclusion>
+                    <artifactId>jackson-mapper-asl</artifactId>
+                    <groupId>org.codehaus.jackson</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>xstream</artifactId>
+                    <groupId>com.thoughtworks.xstream</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>spring-core</artifactId>
+                    <groupId>org.springframework</groupId>
+                </exclusion>
+                <exclusion>
+                    <artifactId>spring-jcl</artifactId>
+                    <groupId>org.springframework</groupId>
+                </exclusion>
+            </exclusions>
@@ -67,6 +85,25 @@
com.webank.wedatasphere.streamis
streamis-project-server
${streamis.version}
+
+
+ jackson-mapper-asl
+ org.codehaus.jackson
+
+
+
+
+
+
+ com.webank.wedatasphere.streamis
+ streamis-job-log-server
+ ${streamis.version}
+
+
+
+ com.thoughtworks.xstream
+ xstream
+ ${xstream.version}
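NOTE: the exclusions added above are meant to keep conflicting
jackson-mapper-asl/xstream/spring artifacts off the server classpath, with
xstream re-introduced at the managed ${xstream.version}. A quick way to check
that no stray copies remain (a sketch, assuming Maven 3 and the repo root as
the working directory):

    mvn -pl streamis-server dependency:tree \
        -Dincludes=org.codehaus.jackson,com.thoughtworks.xstream,org.springframework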
diff --git a/streamis-server/src/main/resources/linkis.properties b/streamis-server/src/main/resources/linkis.properties
index b80410c10..9c6aaf990 100644
--- a/streamis-server/src/main/resources/linkis.properties
+++ b/streamis-server/src/main/resources/linkis.properties
@@ -20,6 +20,8 @@ wds.linkis.server.mybatis.datasource.password=pwd1
wds.linkis.gateway.ip=
wds.linkis.gateway.port=
wds.linkis.gateway.url=http://localhost:9001
+wds.dss.gateway.url=http://localhost:9001
+wds.linkis.http.is.request=true
wds.linkis.mysql.is.encrypt=false
##restful
@@ -34,7 +36,8 @@ wds.linkis.server.restful.scan.packages=com.webank.wedatasphere.streamis.datasou
com.webank.wedatasphere.streamis.project.server.restful,\
com.webank.wedatasphere.streamis.jobmanager.restful.api,\
com.webank.wedatasphere.streamis.datasource.execute.rest,\
- com.webank.wedatasphere.streamis.projectmanager.restful.api
+ com.webank.wedatasphere.streamis.projectmanager.restful.api,\
+ com.webank.wedatasphere.streamis.jobmanager.log.server.restful
##mybatis
wds.linkis.server.mybatis.mapperLocations=\
classpath*:com/webank/wedatasphere/streamis/datasource/manager/dao/impl/*.xml,\
diff --git a/web/package.json b/web/package.json
index fc5a8bfb2..ee7463f18 100644
--- a/web/package.json
+++ b/web/package.json
@@ -1,6 +1,6 @@
{
"name": "streamis",
- "version": "0.2.0",
+ "version": "0.3.0",
"private": true,
"scripts": {
"serve": "vue-cli-service serve",
diff --git a/web/src/apps/streamis/i18n/en.json b/web/src/apps/streamis/i18n/en.json
index 8bf799ba2..0fea10f3d 100644
--- a/web/src/apps/streamis/i18n/en.json
+++ b/web/src/apps/streamis/i18n/en.json
@@ -53,9 +53,21 @@
"notEmpty": "Not empty",
"wrongFormat": "Wrong format"
},
+ "startHint": {
+ "title": "Job Starting Confirm",
+ "version": "Version Confirm",
+ "version1": "Confirm starting job with latest job",
+ "version2": "starting job",
+ "version3": "Last Started Version",
+ "snapshot": "Snapshot Confirm",
+ "snapshot1": "Confirm resuming job with snapshot",
+ "batchConfirm": "Confirm all the jobs"
+ },
"jobListTableColumns":{
"jobName": "Job name",
"jobType": "Job type",
+ "launchMode": "Launch Mode",
+ "manageMode": "Manage Mode",
"taskStatus": "Task status",
"lastReleaseTime": "Last release time",
"label": "Label",
@@ -78,6 +90,8 @@
"modalTitle":"Version detail",
"jobId":"Job id",
"version":"Version",
+ "using":"Using",
+ "versionStatus":"VersionStatus",
"description":"Description",
"createTime":"Create time",
"creator":"Creator",
@@ -179,7 +193,8 @@
"selectJar": "Please choose Jar",
"upload": "Upload",
"choosedJar" : "Choosed Jar",
- "jarError": "File must be Jar"
+ "jarError": "File must be Jar",
+ "choosedFile": "Choosed File"
},
"projectFile": {
"file": "File",
@@ -187,6 +202,7 @@
"overrideImport": "Override import",
"chooseUploadFile": "Please choose the import resource file",
"versionPlaceholder": "Only number and ., such as 0.1.0",
+ "commentLength": "The length of comment text is too long",
"versionEmpty": "Version Cannot be empty",
"fileEmpty": "File cannot be empty",
"delete": "Delete",
diff --git a/web/src/apps/streamis/i18n/zh.json b/web/src/apps/streamis/i18n/zh.json
index d19676419..90d984080 100644
--- a/web/src/apps/streamis/i18n/zh.json
+++ b/web/src/apps/streamis/i18n/zh.json
@@ -52,9 +52,21 @@
"notEmpty": "不能为空",
"wrongFormat": "格式错误"
},
+ "startHint": {
+ "title": "作业启动确认",
+ "version": "版本确认",
+ "version1": "确定以最新版本",
+ "version2": "启动作业",
+ "version3": "上次启动版本",
+ "snapshot": "快照确认",
+ "snapshot1": "确定使用如下快照恢复作业",
+ "batchConfirm": "确认所有批量作业"
+ },
"jobListTableColumns":{
"jobName": "作业名称",
"jobType": "作业类型",
+ "launchMode": "运行模式",
+ "manageMode": "管理模式",
"taskStatus": "运行状态",
"lastReleaseTime": "最近发布时间",
"label": "标签",
@@ -71,12 +83,13 @@
"snapshotInfo": "快照路径",
"startTaskTitle": "启动任务中",
"startTaskContent": "正在批量启动任务..."
-
},
"versionDetail":{
"modalTitle":"版本详情",
"jobId":"作业id",
"version":"版本",
+ "using":"使用中",
+ "versionStatus":"版本状态",
"description":"描述",
"createTime":"创建时间",
"creator":"创建者",
@@ -180,7 +193,8 @@
"choosedJar" : "选中的Jar包",
"jarError": "文件必须是Jar包",
"jarEmpty": "必须选择Jar包",
- "jobNameEmpty": "作业名称不能为空"
+ "jobNameEmpty": "作业名称不能为空",
+ "choosedFile": "选中的文件"
},
"projectFile": {
"file": "文件",
@@ -188,6 +202,7 @@
"overrideImport": "覆盖式导入",
"chooseUploadFile": "请选择您要导入的资源文件",
"versionPlaceholder": "只支持数字和.,例如0.1.0",
+ "commentLength": "文本长度过长,不能超过50个字符",
"versionEmpty": "版本不能为空",
"fileEmpty": "文件不能为空",
"delete": "删除",
diff --git a/web/src/apps/streamis/module/fileVersionDetail/index.vue b/web/src/apps/streamis/module/fileVersionDetail/index.vue
index df2f37ff7..453d80c0b 100644
--- a/web/src/apps/streamis/module/fileVersionDetail/index.vue
+++ b/web/src/apps/streamis/module/fileVersionDetail/index.vue
@@ -100,24 +100,22 @@ export default {
},
methods: {
handlePageChange(page) {
- console.log(page)
this.pageData.pageNow = page
this.$emit('refreshVersionDatas', this.pageData)
},
handlePageSizeChange(pageSize) {
- console.log(pageSize)
this.pageData.pageSize = pageSize
this.pageData.pageNow = 1
this.$emit('refreshVersionDatas', this.pageData)
},
- showVersionInfo(row) {
- console.log(row)
+ showVersionInfo() {
+ // console.log(row)
},
- showDetail(row) {
- console.log(row)
+ showDetail() {
+ // console.log(row)
},
- showLogs(row) {
- console.log(row)
+ showLogs() {
+ // console.log(row)
},
ok() {
this.$Message.info('Clicked ok')
@@ -132,13 +130,12 @@ export default {
rowData.id,
'get'
)
- .then(res => {
- console.log(res)
+ .then(() => {
this.handlePageSizeChange(this.pageData.pageSize)
this.$emit('delelteSuccess')
})
- .catch(e => {
- console.log(e)
+ .catch(err => {
+ console.log(err)
})
}
}
diff --git a/web/src/apps/streamis/module/jobConfig/index.vue b/web/src/apps/streamis/module/jobConfig/index.vue
index b6bd86f46..1c3b28912 100644
--- a/web/src/apps/streamis/module/jobConfig/index.vue
+++ b/web/src/apps/streamis/module/jobConfig/index.vue
@@ -106,7 +106,6 @@ export default {
api
.fetch('streamis/streamJobManager/config/getWorkspaceUsers', 'get')
.then(res => {
- console.log(res)
if (res && res.users) {
this.users = res.users
}
@@ -150,7 +149,6 @@ export default {
'get'
)
.then(res => {
- console.log(res)
let configs = res.def;
const valueMap = {};
const rule = {};
@@ -188,24 +186,20 @@ export default {
.catch(e => console.warn(e))
},
removeParameter(index, key) {
- console.log('removeParameter', index);
const keyValue = this.diyMap[key];
keyValue.splice(index, 1)
this.diyMap = {...this.diyMap, [key]: keyValue}
},
addParameter(key) {
- console.log('addParameter')
this.diyMap = {...this.diyMap, [key]: this.diyMap[key].concat({value: '', key: ''})}
},
async handleSaveConfig() {
- console.log('handleSaveConfig')
this.valueMap = cloneDeep(this.valueMap);
const flags = await Promise.all(Object.keys(this.$refs).map(async ref => {
const ele = this.$refs[ref][0];
if (typeof ele.validate === 'function') return ele.validate();
else return true;
}));
- console.log('flags', flags);
if (!flags.every(Boolean)) return;
this.saveLoading = true;
const configuration = {};
@@ -218,19 +212,36 @@ export default {
});
let warning = false;
let emptyWarning = false;
+ let moreThanOneEmpty = 0;
Object.keys(this.diyMap).forEach(key => {
configuration[key] = {};
+ // Only the Flink custom-parameter group may keep a blank row;
+ // count blank key-value pairs so duplicates can be rejected below.
+ if (Object.keys(this.diyMap).length === 1 && key === 'wds.linkis.flink.custom') {
+ this.diyMap['wds.linkis.flink.custom'].forEach(item => {
+ if (item.key === '' && item.value === '') moreThanOneEmpty++
+ })
+ }
(this.diyMap[key] || []).forEach(mapKey => {
- emptyWarning = !mapKey.key || !mapKey.key.trim();
+ if (key !== 'wds.linkis.flink.custom') emptyWarning = !mapKey.key || !mapKey.key.trim();
if (configuration[key][mapKey.key]) warning = true;
configuration[key][mapKey.key] = mapKey.value || '';
});
if ((this.diyMap[key] || []).length <= 1) {
const only = (this.diyMap[key] || [])[0] || {};
- emptyWarning = !((!only.key || !only.key.trim()) && (!only.value || !only.value.trim()))
+ if (key !== 'wds.linkis.flink.custom') emptyWarning = (!only.key || !only.key.trim()) && (!only.value || !only.value.trim())
}
});
- console.log('configuration', configuration, this.valueMap)
+ if (moreThanOneEmpty > 1) {
+ this.saveLoading = false;
+ return this.$Message.error({ content: '不能有两个及以上空key-value' });
+ }
if (emptyWarning) {
this.saveLoading = false;
return this.$Message.error({ content: '请删除多余自定义字段,key值不能为空' });
@@ -246,15 +257,13 @@ export default {
)
.then(res => {
this.saveLoading = false
- console.log(res)
if (res.errorMsg) {
this.$Message.error(res.errorMsg.desc)
} else {
this.$Message.success(this.$t('message.streamis.operationSuccess'))
}
})
- .catch(e => {
- console.log(e)
+ .catch(() => {
this.saveLoading = false
})
}
diff --git a/web/src/apps/streamis/module/jobDetail/index.vue b/web/src/apps/streamis/module/jobDetail/index.vue
index 92e5f72cc..08dffc62a 100644
--- a/web/src/apps/streamis/module/jobDetail/index.vue
+++ b/web/src/apps/streamis/module/jobDetail/index.vue
@@ -28,7 +28,6 @@ export default {
}
},
mounted() {
- console.log(this.$route.params)
this.getDetail()
},
methods: {
@@ -38,7 +37,6 @@ export default {
api
.fetch(`streamis/streamJobManager/job/jobContent?${query}`, 'get')
.then(res => {
- console.log(res)
if (res && res.jobContent) {
this.detailName = 'jarDetail'
this.data = res.jobContent
@@ -82,14 +80,14 @@ export default {
})
.catch(e => console.log(e))
},
- showVersionInfo(row) {
- console.log(row)
+ showVersionInfo() {
+ // console.log(row)
},
- showDetail(row) {
- console.log(row)
+ showDetail() {
+ // console.log(row)
},
- showLogs(row) {
- console.log(row)
+ showLogs() {
+ // console.log(row)
}
}
}
diff --git a/web/src/apps/streamis/module/jobHistory/index.vue b/web/src/apps/streamis/module/jobHistory/index.vue
index 36d38f79d..eed667c7f 100644
--- a/web/src/apps/streamis/module/jobHistory/index.vue
+++ b/web/src/apps/streamis/module/jobHistory/index.vue
@@ -110,7 +110,6 @@ export default {
},
mounted() {
this.getDatas()
- console.log(this.$route.params)
},
methods: {
getDatas() {
@@ -126,7 +125,6 @@ export default {
.catch(e => console.log(e))
},
showVersionInfo(data) {
- console.log(data)
this.loading = true
api
.fetch(
@@ -137,23 +135,20 @@ export default {
'get'
)
.then(res => {
- console.log(res)
if (res) {
this.loading = false
this.modalVisible = true
this.versionDatas = [res.detail]
}
})
- .catch(e => {
- console.log(e)
+ .catch(() => {
this.loading = false
})
},
- showDetail(row) {
- console.log(row)
+ showDetail() {
+ // console.log(row)
},
showLogs(row) {
- console.log(row)
this.$refs['logDetail'].getDatas(row.taskId)
this.logVisible = true;
this.taskId = +row.taskId;
diff --git a/web/src/apps/streamis/module/jobList/index.vue b/web/src/apps/streamis/module/jobList/index.vue
index c02f0ea0a..3dc6c1103 100644
--- a/web/src/apps/streamis/module/jobList/index.vue
+++ b/web/src/apps/streamis/module/jobList/index.vue
@@ -9,7 +9,7 @@
{{$t('message.streamis.formItems.snapshotRestart')}}
@@ -19,7 +19,7 @@
{{$t('message.streamis.formItems.directRestart')}}
@@ -35,7 +35,7 @@
-