
process definition json worker group convert #2794 (#2809)

* add LoggerServerTest UT
add RemoveTaskLogRequestCommandTest UT
add RemoveTaskLogResponseCommandTest

* master select worker filter high load worker #2704

* add not worker log and remove worker invalid property

* process definition json worker group convert #2794

Co-authored-by: qiaozhanwei <qiaozhanwei@analysys.com.cn>
commit d0fbfeaf17 by qiaozhanwei, committed via GitHub
Changed file (22 lines): dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/upgrade/UpgradeDao.java

@@ -16,6 +16,8 @@
  */
 package org.apache.dolphinscheduler.dao.upgrade;
+import com.alibaba.fastjson.JSONArray;
+import com.alibaba.fastjson.JSONObject;
 import org.apache.dolphinscheduler.common.enums.DbType;
 import org.apache.dolphinscheduler.common.model.TaskNode;
 import org.apache.dolphinscheduler.common.utils.*;
@@ -274,18 +276,24 @@ public abstract class UpgradeDao extends AbstractBaseDao {
         Map<Integer,String> processDefinitionJsonMap = processDefinitionDao.queryAllProcessDefinition(dataSource.getConnection());
         for (Map.Entry<Integer,String> entry : processDefinitionJsonMap.entrySet()){
-            ProcessData processData = JSONUtils.parseObject(entry.getValue(), ProcessData.class);
-            List<TaskNode> tasks = processData.getTasks();
-            for (TaskNode taskNode : tasks){
-                Integer workerGroupId = taskNode.getWorkerGroupId();
+            JSONObject jsonObject = JSONObject.parseObject(entry.getValue());
+            JSONArray tasks = JSONArray.parseArray(jsonObject.getString("tasks"));
+            for (int i = 0 ;i < tasks.size() ; i++){
+                JSONObject task = tasks.getJSONObject(i);
+                Integer workerGroupId = task.getInteger("workerGroupId");
                 if (workerGroupId == -1) {
-                    taskNode.setWorkerGroup("default");
+                    task.put("workerGroup", "default");
                 }else {
-                    taskNode.setWorkerGroup(oldWorkerGroupMap.get(workerGroupId));
+                    task.put("workerGroup", oldWorkerGroupMap.get(workerGroupId));
                 }
             }
-            replaceProcessDefinitionMap.put(entry.getKey(),JSONUtils.toJson(processData));
+            jsonObject.remove(jsonObject.getString("tasks"));
+            jsonObject.put("tasks",tasks);
+            replaceProcessDefinitionMap.put(entry.getKey(),jsonObject.toJSONString());
         }
         if (replaceProcessDefinitionMap.size() > 0){
             processDefinitionDao.updateProcessDefinitionJson(dataSource.getConnection(),replaceProcessDefinitionMap);
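
For readers of this change: the hunk above replaces the ProcessData/TaskNode based conversion with a raw fastjson pass over the process definition JSON, so that each task's legacy numeric workerGroupId is rewritten to a workerGroup name during the upgrade. Below is a minimal, self-contained sketch of that conversion; the sample JSON, the class name WorkerGroupConvertSketch, and the contents of oldWorkerGroupMap are illustrative assumptions, not values taken from the commit.

import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;

import java.util.HashMap;
import java.util.Map;

public class WorkerGroupConvertSketch {

    public static void main(String[] args) {
        // Hypothetical id-to-name mapping, standing in for the oldWorkerGroupMap used in the hunk above.
        Map<Integer, String> oldWorkerGroupMap = new HashMap<>();
        oldWorkerGroupMap.put(2, "hadoop-cluster");

        // Illustrative pre-upgrade process definition JSON: tasks carry a numeric workerGroupId.
        String processDefinitionJson =
                "{\"tasks\":[{\"id\":\"tasks-1\",\"workerGroupId\":-1},"
              + "{\"id\":\"tasks-2\",\"workerGroupId\":2}]}";

        JSONObject jsonObject = JSONObject.parseObject(processDefinitionJson);
        JSONArray tasks = JSONArray.parseArray(jsonObject.getString("tasks"));

        for (int i = 0; i < tasks.size(); i++) {
            JSONObject task = tasks.getJSONObject(i);
            Integer workerGroupId = task.getInteger("workerGroupId");
            // -1 (or a missing id) means the task should run on the default worker group.
            if (workerGroupId == null || workerGroupId == -1) {
                task.put("workerGroup", "default");
            } else {
                task.put("workerGroup", oldWorkerGroupMap.get(workerGroupId));
            }
        }

        // Write the rewritten task array back and serialize the converted definition.
        jsonObject.put("tasks", tasks);
        System.out.println(jsonObject.toJSONString());
    }
}

Running the sketch prints the converted JSON with "workerGroup":"default" on the first task and "workerGroup":"hadoop-cluster" on the second, which is the post-upgrade shape this commit targets.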
