diff --git a/.gitignore b/.gitignore
index aa5f9b252..983d6454a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -101,6 +101,9 @@ dss-apps/dss-data-governance/dss-data-warehouse-dao/target
dss-apps/dss-data-governance/dss-data-warehouse-service/target
dss-apps/dss-data-governance/dss-data-warehouse-server/target
+#dss-git
+dss-git/dss-git-common/target
+dss-git/dss-git-server/target
# plugins
plugins/azkaban/linkis-jobtype/target
diff --git a/assembly/dss-package/pom.xml b/assembly/dss-package/pom.xml
index 842307e24..7607ea840 100644
--- a/assembly/dss-package/pom.xml
+++ b/assembly/dss-package/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.5.0-SNAPSHOT
../../pom.xml
4.0.0
diff --git a/assembly/pom.xml b/assembly/pom.xml
index 0e97116a4..5de3c2e08 100644
--- a/assembly/pom.xml
+++ b/assembly/pom.xml
@@ -22,7 +22,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.5.0-SNAPSHOT
../pom.xml
pom
diff --git a/conf/dss-framework-orchestrator-server.properties b/conf/dss-framework-orchestrator-server.properties
index b70b14792..d6d0a1c4d 100644
--- a/conf/dss-framework-orchestrator-server.properties
+++ b/conf/dss-framework-orchestrator-server.properties
@@ -32,7 +32,6 @@ wds.linkis.server.mybatis.typeAliasesPackage=com.webank.wedatasphere.dss.server.
wds.linkis.server.mybatis.BasePackage=com.webank.wedatasphere.dss.framework.appconn.dao,com.webank.wedatasphere.dss.orchestrator.core.dao,com.webank.wedatasphere.dss.server.dao,com.webank.wedatasphere.dss.application.dao,com.webank.wedatasphere.dss.workspace.mapper,com.webank.wedatasphere.dss.workspace.common.dao,com.webank.wedatasphere.dss.workspace.common.dao,com.webank.wedatasphere.dss.orchestrator.db.dao,com.webank.wedatasphere.dss.workflow.dao,com.webank.wedatasphere.dss.framework.appconn.dao,com.webank.wedatasphere.dss.flow.execution.entrance.dao
-wds.dss.server.scheduling.clear.cs.cron=0 0 3 * * ?
wds.dss.publish.max.remain.version=3
diff --git a/db/dss_ddl.sql b/db/dss_ddl.sql
index 1b52e2c60..551b4b59a 100644
--- a/db/dss_ddl.sql
+++ b/db/dss_ddl.sql
@@ -51,6 +51,7 @@ CREATE TABLE `dss_orchestrator_info` (
`orchestrator_level` varchar(32) DEFAULT NULL COMMENT '工作流级别',
`update_user` varchar(100) DEFAULT NULL COMMENT '更新人',
`update_time` datetime DEFAULT CURRENT_TIMESTAMP COMMENT '更新时间',
+ `status` varchar(64) DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE,
UNIQUE KEY `unique_idx_uuid` (`uuid`)
) ENGINE=InnoDB AUTO_INCREMENT=326 DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT;
@@ -70,6 +71,7 @@ CREATE TABLE `dss_orchestrator_version_info` (
`content` varchar(255) DEFAULT NULL,
`context_id` varchar(200) DEFAULT NULL COMMENT '上下文ID',
`valid_flag` INT(1) DEFAULT '1' COMMENT '版本有效标示,0:无效;1:有效',
+ `commit_id` varchar(64) DEFAULT NULL,
PRIMARY KEY (`id`) USING BTREE
) ENGINE=InnoDB AUTO_INCREMENT=422 DEFAULT CHARSET=utf8mb4 ROW_FORMAT=COMPACT;
@@ -116,6 +118,7 @@ CREATE TABLE `dss_project` (
`dev_process` varchar(200) COLLATE utf8_bin DEFAULT NULL COMMENT '开发流程,多个以英文逗号分隔,取得的值是dss_workspace_dictionary中的dic_key(parent_key=p_develop_process)',
`orchestrator_mode` varchar(200) COLLATE utf8_bin DEFAULT NULL COMMENT '编排模式,多个以英文逗号分隔,取得的值是dss_workspace_dictionary中的dic_key(parent_key=p_arrangement_mode或下面一级)',
`visible` tinyint(4) DEFAULT '1' COMMENT '0:已删除;1:未删除(默认)',
+ `associate_git` tinyint(4) DEFAULT '0' COMMENT '0:未接入git,1:已接入git',
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=313 DEFAULT CHARSET=utf8 COLLATE=utf8_bin ROW_FORMAT=COMPACT;
@@ -639,3 +642,33 @@ key `idx_limit_name` (`limit_name`)
) ENGINE = InnoDB
DEFAULT CHARSET = utf8mb4 COLLATE=utf8mb4_bin COMMENT ='dss用户限制表';
+DROP TABLE IF EXISTS `dss_workspace_associate_git`;
+CREATE TABLE `dss_workspace_associate_git` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT,
+ `workspace_id` bigint(20) DEFAULT NULL,
+ `git_user` varchar(64) DEFAULT NULL COMMENT 'git登录用户名',
+ `git_password` VARCHAR(255) DEFAULT NULL COMMENT 'git登录密码,用于跳转',
+ `git_token` varchar(255) DEFAULT NULL COMMENT '用户配置的git token',
+ `git_url` varchar(255) DEFAULT NULL,
+ `create_time` datetime DEFAULT NULL,
+ `update_time` datetime DEFAULT NULL,
+ `create_by` varchar(128) DEFAULT NULL,
+ `update_by` varchar(128) DEFAULT NULL,
+ `type` varchar(32) DEFAULT NULL,
+ `git_user_id` varchar(32) DEFAULT NULL,
+ PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin COMMENT='工作空间绑定的git信息';
+
+
+DROP TABLE IF EXISTS `dss_orchestrator_submit_job_info`;
+CREATE TABLE `dss_orchestrator_submit_job_info` (
+ `id` bigint(20) NOT NULL AUTO_INCREMENT COMMENT '主键',
+ `orchestrator_id` bigint(20) NOT NULL,
+ `create_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '创建时间',
+ `update_time` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP COMMENT '修改时间',
+ `instance_name` varchar(128) DEFAULT NULL COMMENT '提交任务的实例',
+ `status` varchar(128) DEFAULT NULL COMMENT '提交任务状态',
+ `error_msg` varchar(2048) DEFAULT NULL COMMENT '提交任务异常信息',
+ PRIMARY KEY (`id`)
+) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin COMMENT='编排提交任务记录表';
+
diff --git a/dss-appconn/appconns/dss-datachecker-appconn/pom.xml b/dss-appconn/appconns/dss-datachecker-appconn/pom.xml
index 51cf70eaa..94ac53ec0 100644
--- a/dss-appconn/appconns/dss-datachecker-appconn/pom.xml
+++ b/dss-appconn/appconns/dss-datachecker-appconn/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.5.0-SNAPSHOT
../../../pom.xml
4.0.0
@@ -54,7 +54,7 @@
com.webank.wedatasphere.dss
dss-origin-sso-integration-standard
- 1.1.0.20-SNAPSHOT
+ 1.5.0-SNAPSHOT
org.apache.linkis
diff --git a/dss-appconn/appconns/dss-datachecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/datachecker/DataChecker.java b/dss-appconn/appconns/dss-datachecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/datachecker/DataChecker.java
index 61ff244b5..e77938da5 100644
--- a/dss-appconn/appconns/dss-datachecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/datachecker/DataChecker.java
+++ b/dss-appconn/appconns/dss-datachecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/datachecker/DataChecker.java
@@ -48,7 +48,7 @@ public DataChecker(Properties p, DataCheckerExecutionAction action) {
maxWaitTime = Long.valueOf(p.getProperty(DataChecker.WAIT_TIME, "1")) * 3600 * 1000;
//test over time
// maxWaitTime = Long.valueOf(p.getProperty(DataChecker.WAIT_TIME, "1")) * 120 * 1000;
- queryFrequency = Integer.valueOf(p.getProperty(DataChecker.QUERY_FREQUENCY, "30000"));
+ queryFrequency = Integer.valueOf(p.getProperty(DataChecker.QUERY_FREQUENCY, "60000"));
}
diff --git a/dss-appconn/appconns/dss-datachecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/datachecker/connector/DataCheckerDao.java b/dss-appconn/appconns/dss-datachecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/datachecker/connector/DataCheckerDao.java
index e3bc2ae27..c2275aa1b 100644
--- a/dss-appconn/appconns/dss-datachecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/datachecker/connector/DataCheckerDao.java
+++ b/dss-appconn/appconns/dss-datachecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/datachecker/connector/DataCheckerDao.java
@@ -54,13 +54,6 @@ public class DataCheckerDao {
private static final String SQL_SOURCE_TYPE_JOB_PARTITION =
"SELECT * FROM DBS d JOIN TBLS t ON t.DB_ID = d.DB_ID JOIN PARTITIONS p ON p.TBL_ID = t.TBL_ID WHERE d.NAME=? AND t.TBL_NAME=? AND p.PART_NAME=?";
- private static final String SQL_SOURCE_TYPE_BDP =
- "SELECT * FROM desktop_bdapimport WHERE bdap_db_name = ? AND bdap_table_name = ? AND target_partition_name = ? AND status = '1';";
-
- private static final String SQL_SOURCE_TYPE_BDP_WITH_TIME_CONDITION =
- "SELECT * FROM desktop_bdapimport WHERE bdap_db_name = ? AND bdap_table_name = ? AND target_partition_name = ? " +
- "AND (UNIX_TIMESTAMP() - UNIX_TIMESTAMP(STR_TO_DATE(modify_time, '%Y-%m-%d %H:%i:%s'))) <= ? AND status = '1';";
-
private static final String SQL_DOPS_CHECK_TABLE =
"SELECT * FROM dops_clean_task_list WHERE db_name = ? AND tb_name = ? AND part_name is null AND task_state NOT IN (10,13) order by order_id desc limit 1";
private static final String SQL_DOPS_CHECK_PARTITION =
@@ -72,7 +65,6 @@ public class DataCheckerDao {
private static final String MASK_SOURCE_TYPE = "maskdb";
private static DataSource jobDS;
- private static DataSource bdpDS;
private static DataSource dopsDS;
private static volatile DataCheckerDao instance;
@@ -96,13 +88,6 @@ public boolean validateTableStatusFunction(Properties props, Logger log, DataChe
return false;
}
}
- if (bdpDS == null) {
- bdpDS = DataDruidFactory.getBDPInstance(props, log);
- if (bdpDS == null) {
- log.warn("Error getting job Druid DataSource instance");
- return false;
- }
- }
boolean systemCheck = Boolean.valueOf(props.getProperty(DataChecker.QUALITIS_SWITCH));
if (systemCheck && dopsDS == null) {
dopsDS = DataDruidFactory.getDopsInstance(props, log);//通过alibaba的druid数据库连接池获取JOB数据库连接
@@ -122,7 +107,7 @@ public boolean validateTableStatusFunction(Properties props, Logger log, DataChe
}
log.info("(DataChecker info) database table partition info : " + dataCheckerInfo);
long waitTime = Long.valueOf(props.getProperty(DataChecker.WAIT_TIME, "1")) * 3600 * 1000;
- int queryFrequency = Integer.valueOf(props.getProperty(DataChecker.QUERY_FREQUENCY, "30000"));
+ int queryFrequency = Integer.valueOf(props.getProperty(DataChecker.QUERY_FREQUENCY, "60000"));
// String timeScape = props.getProperty(DataChecker.TIME_SCAPE, "NULL");
log.info("(DataChecker info) wait time : " + waitTime);
log.info("(DataChecker info) query frequency : " + queryFrequency);
@@ -134,13 +119,12 @@ public boolean validateTableStatusFunction(Properties props, Logger log, DataChe
});
QualitisUtil qualitisUtil = new QualitisUtil(props);
try (Connection jobConn = jobDS.getConnection();
- Connection bdpConn = bdpDS.getConnection();
Connection dopsConn = dopsDS != null ? dopsDS.getConnection() : null) {
List allCheckRes = dataObjectList
.parallelStream()
.map(proObjectMap -> {
log.info("Begin to Check dataObject:" + proObjectMap.entrySet().toString());
- boolean checkRes = getDataCheckResult(proObjectMap, jobConn, bdpConn, dopsConn, props, log,action,qualitisUtil);
+ boolean checkRes = getDataCheckResult(proObjectMap, jobConn, dopsConn, props, log,action,qualitisUtil);
if (null != action.getExecutionRequestRefContext()) {
if (checkRes) {
action.getExecutionRequestRefContext().appendLog("Database table partition info : " + proObjectMap.get(DataChecker.DATA_OBJECT) + " has arrived");
@@ -178,7 +162,6 @@ public boolean validateTableStatusFunction(Properties props, Logger log, DataChe
private boolean getDataCheckResult(Map proObjectMap,
Connection jobConn,
- Connection bdpConn,
Connection dopsConn,
Properties props,
Logger log,
@@ -231,7 +214,7 @@ private boolean getDataCheckResult(Map proObjectMap,
}
log.info("start to check maskis");
proObjectMap.put(DataChecker.SOURCE_TYPE, MASK_SOURCE_TYPE);
- normalCheck= (getBdpTotalCount(dataObject, bdpConn, log, props) > 0 || "success".equals(fetchMaskCode(dataObject, log, props).get("maskStatus")));
+ normalCheck= "success".equals(fetchMaskCode(dataObject, log, props).get("maskStatus"));
if (null != action.getExecutionRequestRefContext()){
action.getExecutionRequestRefContext().appendLog(dataObjectStr+" check maskis end,check result:"+normalCheck);
}
@@ -316,25 +299,6 @@ private PreparedStatement getJobStatement(Connection conn, CheckDataObject dataO
}
}
- /**
- * 构造查询maskis的查询
- */
- private PreparedStatement getBdpStatement(Connection conn, CheckDataObject dataObject, String timeScape) throws SQLException {
- PreparedStatement pstmt = null;
- if (timeScape.equals("NULL")) {
- pstmt = conn.prepareCall(SQL_SOURCE_TYPE_BDP);
- } else {
- pstmt = conn.prepareCall(SQL_SOURCE_TYPE_BDP_WITH_TIME_CONDITION);
- pstmt.setInt(4, Integer.valueOf(timeScape) * 3600);
- }
- if (dataObject.getPartitionName() == null) {
- dataObject.setPartitionName("");
- }
- pstmt.setString(1, dataObject.getDbName());
- pstmt.setString(2, dataObject.getTableName());
- pstmt.setString(3, dataObject.getPartitionName());
- return pstmt;
- }
/**
* 构造查询dops库的查询
@@ -414,27 +378,6 @@ private long getJobTotalCount(CheckDataObject dataObject, Connection conn, Logge
}
}
- /**
- * 查mask db
- */
- private long getBdpTotalCount(CheckDataObject dataObject, Connection conn, Logger log, Properties props) {
- String timeScape = props.getOrDefault(DataChecker.TIME_SCAPE, "NULL").toString();
- log.info("-------------------------------------- search bdp data ");
- log.info("-------------------------------------- dataObject: " + dataObject.toString());
- try (PreparedStatement pstmt = getBdpStatement(conn, dataObject, timeScape)) {
- ResultSet rs = pstmt.executeQuery();
- long ret = 0L;
- while (rs.next()) {
- ret ++;
- }
-// long ret=rs.last() ? rs.getRow() : 0;
- log.info("-------------------------------------- bdp data result:"+ret);
- return ret;
- } catch (SQLException e) {
- log.error("fetch data from bdp error", e);
- return 0;
- }
- }
/**
* - 返回0表示未找到任何记录 ;
diff --git a/dss-appconn/appconns/dss-datachecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/datachecker/connector/DataDruidFactory.java b/dss-appconn/appconns/dss-datachecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/datachecker/connector/DataDruidFactory.java
index 32dba01eb..6d8afb3fe 100644
--- a/dss-appconn/appconns/dss-datachecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/datachecker/connector/DataDruidFactory.java
+++ b/dss-appconn/appconns/dss-datachecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/datachecker/connector/DataDruidFactory.java
@@ -25,7 +25,9 @@
public class DataDruidFactory {
private static volatile DruidDataSource jobInstance;
- private static volatile DruidDataSource bdpInstance;
+
+ private static volatile DruidDataSource dopsInstance;
+
private static volatile DruidDataSource msgInstance;
public static DruidDataSource getJobInstance(Properties props, Logger log) {
@@ -42,6 +44,7 @@ public static DruidDataSource getJobInstance(Properties props, Logger log) {
}
return jobInstance;
}
+
public static DruidDataSource getBDPInstance(Properties props, Logger log) {
if (bdpInstance == null ) {
synchronized (DataDruidFactory.class) {
diff --git a/dss-appconn/appconns/dss-dolphinscheduler-appconn/pom.xml b/dss-appconn/appconns/dss-dolphinscheduler-appconn/pom.xml
index 8543d92a9..e56b91b69 100644
--- a/dss-appconn/appconns/dss-dolphinscheduler-appconn/pom.xml
+++ b/dss-appconn/appconns/dss-dolphinscheduler-appconn/pom.xml
@@ -6,7 +6,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.5.0-SNAPSHOT
../../../pom.xml
4.0.0
@@ -18,7 +18,6 @@
com.webank.wedatasphere.dss
dss-scheduler-appconn
${dss.version}
-
provided
@@ -81,7 +80,7 @@
com.google.guava
guava
- 28.2-android
+ 33.1.0-jre
diff --git a/dss-appconn/appconns/dss-eventchecker-appconn/pom.xml b/dss-appconn/appconns/dss-eventchecker-appconn/pom.xml
index 7539d9e68..6d375adc2 100644
--- a/dss-appconn/appconns/dss-eventchecker-appconn/pom.xml
+++ b/dss-appconn/appconns/dss-eventchecker-appconn/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.5.0-SNAPSHOT
../../../pom.xml
4.0.0
diff --git a/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/entity/EventChecker.java b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/entity/EventChecker.java
index 1854973ae..525c19861 100644
--- a/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/entity/EventChecker.java
+++ b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/entity/EventChecker.java
@@ -67,10 +67,10 @@ public EventChecker(Properties p, EventCheckerExecutionAction action) {
String waitTime = p.getProperty(EventChecker.WAIT_TIME, "1");
Double doubleWaitTime = Double.valueOf(waitTime) * 3600 * 1000;
maxWaitTime = Long.valueOf(doubleWaitTime.longValue());
- String query_frequency = p.getProperty(EventChecker.QUERY_FREQUENCY, "30000");
+ String query_frequency = p.getProperty(EventChecker.QUERY_FREQUENCY, "60000");
queryFrequency = Integer.valueOf(query_frequency);
- if(queryFrequency <10000){
- queryFrequency = 10000;
+ if(queryFrequency <60000){
+ queryFrequency = 60000;
}
}
diff --git a/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/AbstractEventCheck.java b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/AbstractEventCheck.java
index 23ed8576f..8d51d4cf1 100644
--- a/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/AbstractEventCheck.java
+++ b/dss-appconn/appconns/dss-eventchecker-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/eventchecker/service/AbstractEventCheck.java
@@ -70,7 +70,7 @@ void initECParams(Properties props){
runDate = props.getProperty("run_date");
userTime = props.getProperty(EventChecker.USER_TIME);
waitTime = props.getProperty(EventChecker.WAIT_TIME, "1");
- query_frequency = props.getProperty(EventChecker.QUERY_FREQUENCY, "30000");
+ query_frequency = props.getProperty(EventChecker.QUERY_FREQUENCY, "60000");
afterSend = props.getProperty(EventChecker.AFTERSEND);
}
diff --git a/dss-appconn/appconns/dss-schedulis-appconn/pom.xml b/dss-appconn/appconns/dss-schedulis-appconn/pom.xml
index ae044adb0..5d2acc145 100644
--- a/dss-appconn/appconns/dss-schedulis-appconn/pom.xml
+++ b/dss-appconn/appconns/dss-schedulis-appconn/pom.xml
@@ -21,7 +21,7 @@
dss
com.webank.wedatasphere.dss
- 1.1.0.20-SNAPSHOT
+ 1.5.0-SNAPSHOT
../../../pom.xml
4.0.0
diff --git a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/constant/AzkabanConstant.java b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/constant/AzkabanConstant.java
index 821c3a6d0..87d5265a0 100644
--- a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/constant/AzkabanConstant.java
+++ b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/constant/AzkabanConstant.java
@@ -29,5 +29,6 @@ public class AzkabanConstant {
public final static String FLOW_CONTEXT_ID = "wds.linkis.flow.contextID=";
public final static String LINKIS_VERSION = "linkis.version";
public final static String JOB_COMMENT = "comment";
+ public final static String AUTO_DISABLED = "auto.disabled";
}
diff --git a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/linkisjob/LinkisJob.java b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/linkisjob/LinkisJob.java
index 76e7ef885..2d269872f 100644
--- a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/linkisjob/LinkisJob.java
+++ b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/linkisjob/LinkisJob.java
@@ -23,6 +23,8 @@ public class LinkisJob {
private String type;
private String linkistype;
private String proxyUser;
+
+ private String autoDisabled;
private String dependencies;
private Map conf;
private String command;
@@ -60,6 +62,14 @@ public void setProxyUser(String proxyUser) {
this.proxyUser = proxyUser;
}
+ public String getAutoDisabled() {
+ return autoDisabled;
+ }
+
+ public void setAutoDisabled(String autoDisabled) {
+ this.autoDisabled = autoDisabled;
+ }
+
public String getDependencies() {
return dependencies;
}
diff --git a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/linkisjob/LinkisJobConverter.java b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/linkisjob/LinkisJobConverter.java
index f175b6ca3..c60882d50 100644
--- a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/linkisjob/LinkisJobConverter.java
+++ b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/linkisjob/LinkisJobConverter.java
@@ -70,6 +70,7 @@ private String convertJobToString(LinkisJob job){
map.put(WorkflowConstant.PROXY_USER,job.getProxyUser());
map.put(AzkabanConstant.JOB_COMMAND,job.getCommand());
map.put(AzkabanConstant.JOB_COMMENT,job.getComment());
+ map.put(AzkabanConstant.AUTO_DISABLED,job.getAutoDisabled());
Map labels = new HashMap<>(1);
labels.put("route", SchedulerConf.JOB_LABEL.getValue());
map.put(AzkabanConstant.JOB_LABELS, DSSCommonUtils.COMMON_GSON.toJson(labels));
@@ -114,7 +115,8 @@ private void convertConfiguration(WorkflowNode workflowNode, LinkisJob job){
configuration.forEach((k,v)-> {
if(null!=v) {
v.forEach((k2, v2) -> {
- if(null !=v2) {job.getConf().put(confprefix + k + "." + k2, v2.toString());}
+ if(AzkabanConstant.AUTO_DISABLED.equals(k2) && null !=v2){job.setAutoDisabled(v2.toString());}
+ else if(null !=v2) {job.getConf().put(confprefix + k + "." + k2, v2.toString());}
});
}
});
diff --git a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/operation/SchedulisProjectSearchOperation.java b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/operation/SchedulisProjectSearchOperation.java
index b15e20d30..7514853d6 100644
--- a/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/operation/SchedulisProjectSearchOperation.java
+++ b/dss-appconn/appconns/dss-schedulis-appconn/src/main/java/com/webank/wedatasphere/dss/appconn/schedulis/operation/SchedulisProjectSearchOperation.java
@@ -28,6 +28,7 @@ public ProjectResponseRef searchProject(RefProjectContentRequestRef.RefProjectCo
params.put("project", requestRef.getProjectName());
params.put("ajax", "fetchprojectflows");
try {
+ logger.info("request url from Schedulis is: {}.", queryUrl);
String responseBody = SchedulisHttpUtils.getHttpGetResult(queryUrl, params, ssoRequestOperation, requestRef.getWorkspace());
logger.info("responseBody from Schedulis is: {}.", responseBody);
Map map = DSSCommonUtils.COMMON_GSON.fromJson(responseBody, new TypeToken