
Merge pull request #5 from apache/dev

update local dev
Zhou.Z 4 years ago committed by GitHub
commit 373cc47bae
  1. dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/MailUtils.java (11)
  2. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/TaskCountDto.java (24)
  3. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java (264)
  4. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java (548)
  5. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java (17)
  6. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ExecutionStatus.java (109)
  7. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskStateType.java (10)
  8. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNode.java (50)
  9. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DateUtils.java (444)
  10. dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/enums/ExecutionStatusTest.java (32)
  11. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java (81)
  12. dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java (156)
  13. dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/TaskInstanceTest.java (32)
  14. dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/AccessTokenMapperTest.java (75)
  15. dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ResourceMapperTest.java (85)
  16. dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/codec/NettyDecoder.java (1)
  17. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/builder/TaskExecutionContextBuilder.java (2)
  18. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/TaskExecutionContext.java (171)
  19. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/future/TaskFuture.java (21)
  20. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseService.java (1)
  21. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/ConditionsTaskExecThread.java (1)
  22. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/DependentTaskExecThread.java (1)
  23. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterBaseTaskExecThread.java (21)
  24. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java (54)
  25. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java (17)
  26. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/registry/ZookeeperNodeManager.java (4)
  27. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/AlertManager.java (109)
  28. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteProcessor.java (39)
  29. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskExecuteThread.java (98)
  30. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java (22)
  31. dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java (2)
  32. dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/ConditionsTaskTest.java (171)
  33. dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumerTest.java (3)
  34. dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcherTest.java (3)
  35. dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/executor/NettyExecutorManagerTest.java (3)
  36. dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/RoundRobinHostManagerTest.java (3)
  37. dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseServiceTest.java (4)
  38. dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryTest.java (4)
  39. dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/runner/TaskExecuteThreadTest.java (171)
  40. dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java (98)
  41. dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/QuartzExecutors.java (23)
  42. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/config.js (7)
  43. dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue (21)
  44. dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/_source/instanceConditions/common.js (3)
  45. dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/taskStatusCount.vue (147)
  46. dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/index/index.vue (8)
  47. dolphinscheduler-ui/src/js/conf/home/store/projects/actions.js (2)
  48. dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js (2)
  49. dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js (2)
  50. e2e/src/test/java/org/apache/dolphinscheduler/common/BrowserCommon.java (40)
  51. e2e/src/test/java/org/apache/dolphinscheduler/locator/project/WorkflowDefineLocator.java (12)
  52. e2e/src/test/java/org/apache/dolphinscheduler/page/project/WorkflowDefinePage.java (5)
  53. pom.xml (5)
  54. sql/dolphinscheduler-postgre.sql (4)
  55. sql/dolphinscheduler_mysql.sql (2)
  56. sql/upgrade/1.3.0_schema/mysql/dolphinscheduler_ddl.sql (1)
  57. sql/upgrade/1.3.3_schema/mysql/dolphinscheduler_ddl.sql (58)
  58. sql/upgrade/1.3.3_schema/mysql/dolphinscheduler_dml.sql (16)
  59. sql/upgrade/1.3.3_schema/postgresql/dolphinscheduler_ddl.sql (52)
  60. sql/upgrade/1.3.3_schema/postgresql/dolphinscheduler_dml.sql (16)
  61. style/intellij-java-code-style.xml (3)

dolphinscheduler-alert/src/main/java/org/apache/dolphinscheduler/alert/utils/MailUtils.java (11)

@ -59,7 +59,7 @@ public class MailUtils {
public static final String STARTTLS_ENABLE = PropertyUtils.getString(Constants.MAIL_SMTP_STARTTLS_ENABLE);
public static final String SSL_ENABLE = PropertyUtils.getString(Constants.MAIL_SMTP_SSL_ENABLE);
public static final Boolean SSL_ENABLE = PropertyUtils.getBoolean(Constants.MAIL_SMTP_SSL_ENABLE);
public static final String SSL_TRUST = PropertyUtils.getString(Constants.MAIL_SMTP_SSL_TRUST);
@ -213,6 +213,7 @@ public class MailUtils {
/**
* get session
*
* @return the new Session
*/
private static Session getSession() {
@ -222,8 +223,10 @@ public class MailUtils {
props.setProperty(Constants.MAIL_SMTP_AUTH, Constants.STRING_TRUE);
props.setProperty(Constants.MAIL_TRANSPORT_PROTOCOL, MAIL_PROTOCOL);
props.setProperty(Constants.MAIL_SMTP_STARTTLS_ENABLE, STARTTLS_ENABLE);
props.setProperty(Constants.MAIL_SMTP_SSL_ENABLE, SSL_ENABLE);
props.setProperty(Constants.MAIL_SMTP_SSL_TRUST, SSL_TRUST);
if (SSL_ENABLE) {
props.setProperty(Constants.MAIL_SMTP_SSL_ENABLE, "true");
props.setProperty(Constants.MAIL_SMTP_SSL_TRUST, SSL_TRUST);
}
Authenticator auth = new Authenticator() {
@Override
@ -345,5 +348,5 @@ public class MailUtils {
retMap.put(Constants.MESSAGE, "Send email to {" + String.join(",", receivers) + "} failed," + e.toString());
}
}
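
The hunk above changes SSL_ENABLE in MailUtils from a String read via PropertyUtils.getString to a Boolean read via PropertyUtils.getBoolean, and applies the SSL properties to the mail session only when it is true. A minimal standalone sketch of that guard, using the standard javax.mail API with placeholder property names and credentials instead of the project's Constants and PropertyUtils, might look like this:

import java.util.Properties;
import javax.mail.Authenticator;
import javax.mail.PasswordAuthentication;
import javax.mail.Session;

public class MailSessionSketch {

    public static void main(String[] args) {
        // Hypothetical configuration values; in MailUtils these come from PropertyUtils/Constants.
        boolean sslEnable = Boolean.parseBoolean(System.getProperty("mail.ssl.enable", "false"));
        String sslTrust = System.getProperty("mail.ssl.trust", "smtp.example.com");

        Properties props = new Properties();
        props.setProperty("mail.smtp.auth", "true");
        props.setProperty("mail.transport.protocol", "smtp");
        props.setProperty("mail.smtp.starttls.enable", "true");

        // Only set the SSL-related properties when SSL is actually enabled,
        // mirroring the if (SSL_ENABLE) guard introduced in the hunk above.
        if (sslEnable) {
            props.setProperty("mail.smtp.ssl.enable", "true");
            props.setProperty("mail.smtp.ssl.trust", sslTrust);
        }

        Authenticator auth = new Authenticator() {
            @Override
            protected PasswordAuthentication getPasswordAuthentication() {
                // Placeholder credentials for the sketch only.
                return new PasswordAuthentication("user", "password");
            }
        };

        Session session = Session.getInstance(props, auth);
        System.out.println("SSL enabled: " + session.getProperty("mail.smtp.ssl.enable"));
    }
}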

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/dto/TaskCountDto.java (24)

@ -42,9 +42,10 @@ public class TaskCountDto {
countTaskDtos(taskInstanceStateCounts);
}
private void countTaskDtos(List<ExecuteStatusCount> taskInstanceStateCounts){
private void countTaskDtos(List<ExecuteStatusCount> taskInstanceStateCounts) {
int submittedSuccess = 0;
int runningExeution = 0;
int runningExecution = 0;
int delayExecution = 0;
int readyPause = 0;
int pause = 0;
int readyStop = 0;
@ -55,15 +56,18 @@ public class TaskCountDto {
int kill = 0;
int waittingThread = 0;
for(ExecuteStatusCount taskInstanceStateCount : taskInstanceStateCounts){
for (ExecuteStatusCount taskInstanceStateCount : taskInstanceStateCounts) {
ExecutionStatus status = taskInstanceStateCount.getExecutionStatus();
totalCount += taskInstanceStateCount.getCount();
switch (status){
switch (status) {
case SUBMITTED_SUCCESS:
submittedSuccess += taskInstanceStateCount.getCount();
break;
case RUNNING_EXECUTION:
runningExeution += taskInstanceStateCount.getCount();
runningExecution += taskInstanceStateCount.getCount();
break;
case DELAY_EXECUTION:
delayExecution += taskInstanceStateCount.getCount();
break;
case READY_PAUSE:
readyPause += taskInstanceStateCount.getCount();
@ -93,13 +97,14 @@ public class TaskCountDto {
waittingThread += taskInstanceStateCount.getCount();
break;
default:
break;
default:
break;
}
}
this.taskCountDtos = new ArrayList<>();
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.SUBMITTED_SUCCESS, submittedSuccess));
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.RUNNING_EXECUTION, runningExeution));
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.RUNNING_EXECUTION, runningExecution));
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.DELAY_EXECUTION, delayExecution));
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.READY_PAUSE, readyPause));
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.PAUSE, pause));
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.READY_STOP, readyStop));
@ -111,8 +116,7 @@ public class TaskCountDto {
this.taskCountDtos.add(new TaskStateCount(ExecutionStatus.WAITTING_THREAD, waittingThread));
}
public List<TaskStateCount> getTaskCountDtos(){
public List<TaskStateCount> getTaskCountDtos() {
return taskCountDtos;
}
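
The TaskCountDto hunks add a delayExecution counter for the new DELAY_EXECUTION state alongside the existing switch. As an illustration only, the same per-status tally can be kept in an EnumMap; the sketch below uses local stand-in types, not the project's ExecutionStatus or ExecuteStatusCount:

import java.util.Arrays;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;

public class StatusCountSketch {

    // Local stand-ins for the project's ExecutionStatus and ExecuteStatusCount.
    enum Status { SUBMITTED_SUCCESS, RUNNING_EXECUTION, DELAY_EXECUTION, PAUSE, FAILURE, SUCCESS }

    static class StatusCount {
        final Status status;
        final int count;
        StatusCount(Status status, int count) {
            this.status = status;
            this.count = count;
        }
    }

    // Tally counts per status without a long switch: every status gets a bucket,
    // including newly added ones such as DELAY_EXECUTION.
    static Map<Status, Integer> countByStatus(List<StatusCount> rows) {
        Map<Status, Integer> totals = new EnumMap<>(Status.class);
        for (Status s : Status.values()) {
            totals.put(s, 0);
        }
        for (StatusCount row : rows) {
            totals.merge(row.status, row.count, Integer::sum);
        }
        return totals;
    }

    public static void main(String[] args) {
        List<StatusCount> rows = Arrays.asList(
                new StatusCount(Status.RUNNING_EXECUTION, 3),
                new StatusCount(Status.DELAY_EXECUTION, 1));
        System.out.println(countByStatus(rows));
    }
}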

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/ProcessDefinitionServiceImpl.java (264)

@ -14,27 +14,11 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_SUB_PROCESS_DEFINE_ID;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import org.apache.dolphinscheduler.api.dto.ProcessMeta;
import org.apache.dolphinscheduler.api.dto.treeview.Instance;
import org.apache.dolphinscheduler.api.dto.treeview.TreeViewDto;
@ -85,6 +69,26 @@ import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.dao.utils.DagHelper;
import org.apache.dolphinscheduler.service.permission.PermissionCheck;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.io.BufferedOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
@ -92,6 +96,7 @@ import org.springframework.http.MediaType;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.fasterxml.jackson.core.JsonProcessingException;
@ -100,11 +105,11 @@ import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
/**
* process definition service
* process definition service impl
*/
@Service
public class ProcessDefinitionServiceImpl extends BaseService implements
ProcessDefinitionService {
ProcessDefinitionService {
private static final Logger logger = LoggerFactory.getLogger(ProcessDefinitionServiceImpl.class);
@ -138,13 +143,13 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
/**
* create process definition
*
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param processDefinitionJson process definition json
* @param desc description
* @param locations locations for nodes
* @param connects connects for nodes
* @param desc description
* @param locations locations for nodes
* @param connects connects for nodes
* @return create result code
* @throws JsonProcessingException JsonProcessingException
*/
@ -156,7 +161,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
String locations,
String connects) throws JsonProcessingException {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
// check project auth
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
@ -238,13 +243,13 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
/**
* query process definition list
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @return definition list
*/
public Map<String, Object> queryProcessDefinitionList(User loginUser, String projectName) {
HashMap<String, Object> result = new HashMap<>(5);
HashMap<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
@ -264,17 +269,17 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
/**
* query process definition list paging
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @param userId user id
* @param searchVal search value
* @param pageNo page number
* @param pageSize page size
* @param userId user id
* @return process definition page
*/
public Map<String, Object> queryProcessDefinitionListPaging(User loginUser, String projectName, String searchVal, Integer pageNo, Integer pageSize, Integer userId) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
@ -283,11 +288,11 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
return checkResult;
}
Page<ProcessDefinition> page = new Page(pageNo, pageSize);
Page<ProcessDefinition> page = new Page<>(pageNo, pageSize);
IPage<ProcessDefinition> processDefinitionIPage = processDefineMapper.queryDefineListPaging(
page, searchVal, userId, project.getId(), isAdmin(loginUser));
PageInfo pageInfo = new PageInfo<ProcessData>(pageNo, pageSize);
PageInfo<ProcessDefinition> pageInfo = new PageInfo<>(pageNo, pageSize);
pageInfo.setTotalCount((int) processDefinitionIPage.getTotal());
pageInfo.setLists(processDefinitionIPage.getRecords());
result.put(Constants.DATA_LIST, pageInfo);
@ -299,15 +304,15 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
/**
* query datail of process definition
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param processId process definition id
* @param processId process definition id
* @return process definition detail
*/
public Map<String, Object> queryProcessDefinitionById(User loginUser, String projectName, Integer processId) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
@ -329,20 +334,20 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
/**
* update process definition
*
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param id process definition id
* @param loginUser login user
* @param projectName project name
* @param name process definition name
* @param id process definition id
* @param processDefinitionJson process definition json
* @param desc description
* @param locations locations for nodes
* @param connects connects for nodes
* @param desc description
* @param locations locations for nodes
* @param connects connects for nodes
* @return update result code
*/
public Map<String, Object> updateProcessDefinition(User loginUser, String projectName, int id, String name,
String processDefinitionJson, String desc,
String locations, String connects) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
@ -405,9 +410,9 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
/**
* verify process definition name unique
*
* @param loginUser login user
* @param loginUser login user
* @param projectName project name
* @param name name
* @param name name
* @return true if process definition name not exists, otherwise false
*/
public Map<String, Object> verifyProcessDefinitionName(User loginUser, String projectName, String name) {
@ -432,15 +437,15 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
/**
* delete process definition by id
*
* @param loginUser login user
* @param projectName project name
* @param loginUser login user
* @param projectName project name
* @param processDefinitionId process definition id
* @return delete result code
*/
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> deleteProcessDefinitionById(User loginUser, String projectName, Integer processDefinitionId) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
Project project = projectMapper.queryByName(projectName);
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
@ -497,9 +502,9 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
/**
* release process definition: online / offline
*
* @param loginUser login user
* @param projectName project name
* @param id process definition id
* @param loginUser login user
* @param projectName project name
* @param id process definition id
* @param releaseState release state
* @return release result code
*/
@ -569,11 +574,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
/**
* batch export process definition by ids
*
* @param loginUser
* @param projectName
* @param processDefinitionIds
* @param response
*/
public void batchExportProcessDefinitionByIds(User loginUser, String projectName, String processDefinitionIds, HttpServletResponse response) {
@ -602,9 +602,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
/**
* get process definition list by ids
*
* @param processDefinitionIds
* @return
*/
private List<ProcessMeta> getProcessDefinitionList(String processDefinitionIds) {
List<ProcessMeta> processDefinitionList = new ArrayList<>();
@ -623,9 +620,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
/**
* download the process definition file
*
* @param response
* @param processDefinitionList
*/
private void downloadProcessDefinitionFile(HttpServletResponse response, List<ProcessMeta> processDefinitionList) {
response.setContentType(MediaType.APPLICATION_JSON_UTF8_VALUE);
@ -661,19 +655,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
* get export process metadata string
*
* @param processDefinitionId process definition id
* @param processDefinition process definition
* @return export process metadata string
*/
private String exportProcessMetaDataStr(Integer processDefinitionId, ProcessDefinition processDefinition) {
//create workflow json file
return JSONUtils.toJsonString(exportProcessMetaData(processDefinitionId, processDefinition));
}
/**
* get export process metadata string
*
* @param processDefinitionId process definition id
* @param processDefinition process definition
* @param processDefinition process definition
* @return export process metadata string
*/
public ProcessMeta exportProcessMetaData(Integer processDefinitionId, ProcessDefinition processDefinition) {
@ -745,16 +727,16 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
/**
* import process definition
*
* @param loginUser login user
* @param file process metadata json file
* @param loginUser login user
* @param file process metadata json file
* @param currentProjectName current project name
* @return import process
*/
@Transactional(rollbackFor = RuntimeException.class)
public Map<String, Object> importProcessDefinition(User loginUser, MultipartFile file, String currentProjectName) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
String processMetaJson = FileUtils.file2String(file);
List<ProcessMeta> processMetaList = JSONUtils.toList(processMetaJson, ProcessMeta.class);
List<ProcessMeta> processMetaList = JSONUtils.toList(processMetaJson, ProcessMeta.class);
//check file content
if (CollectionUtils.isEmpty(processMetaList)) {
@ -774,12 +756,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
/**
* check and import process definition
*
* @param loginUser
* @param currentProjectName
* @param result
* @param processMeta
* @return
*/
private boolean checkAndImportProcessDefinition(User loginUser, String currentProjectName, Map<String, Object> result, ProcessMeta processMeta) {
@ -821,8 +797,8 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
//create process definition
Integer processDefinitionId =
Objects.isNull(createProcessResult.get(PROCESSDEFINITIONID)) ?
null : Integer.parseInt(createProcessResult.get(PROCESSDEFINITIONID).toString());
Objects.isNull(createProcessResult.get(PROCESSDEFINITIONID))
? null : Integer.parseInt(createProcessResult.get(PROCESSDEFINITIONID).toString());
//scheduler param
return getImportProcessScheduleResult(loginUser,
@ -836,14 +812,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
/**
* get create process result
*
* @param loginUser
* @param currentProjectName
* @param result
* @param processMeta
* @param processDefinitionName
* @param importProcessParam
* @return
*/
private Map<String, Object> getCreateProcessResult(User loginUser,
String currentProjectName,
@ -871,14 +839,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
/**
* get import process schedule result
*
* @param loginUser
* @param currentProjectName
* @param result
* @param processMeta
* @param processDefinitionName
* @param processDefinitionId
* @return
*/
private boolean getImportProcessScheduleResult(User loginUser,
String currentProjectName,
@ -903,10 +863,6 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
/**
* check importance params
*
* @param processMeta
* @param result
* @return
*/
private boolean checkImportanceParams(ProcessMeta processMeta, Map<String, Object> result) {
if (StringUtils.isEmpty(processMeta.getProjectName())) {
@ -924,12 +880,13 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
return true;
}
/**
* import process add special task param
*
* @param loginUser login user
* @param loginUser login user
* @param processDefinitionJson process definition json
* @param targetProject target project
* @param targetProject target project
* @return import process param
*/
private String addImportTaskNodeParam(User loginUser, String processDefinitionJson, Project targetProject) {
@ -946,7 +903,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
}
//recursive sub-process parameter correction map key for old process id value for new process id
Map<Integer, Integer> subProcessIdMap = new HashMap<>(20);
Map<Integer, Integer> subProcessIdMap = new HashMap<>();
List<Object> subProcessList = StreamUtils.asStream(jsonArray.elements())
.filter(elem -> checkTaskHasSubProcess(JSONUtils.parseObject(elem.toString()).path("type").asText()))
@ -963,11 +920,11 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
/**
* import process schedule
*
* @param loginUser login user
* @param currentProjectName current project name
* @param processMeta process meta data
* @param loginUser login user
* @param currentProjectName current project name
* @param processMeta process meta data
* @param processDefinitionName process definition name
* @param processDefinitionId process definition id
* @param processDefinitionId process definition id
* @return insert schedule flag
*/
public int importProcessSchedule(User loginUser, String currentProjectName, ProcessMeta processMeta,
@ -1017,9 +974,9 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
* check import process has sub process
* recursion create sub process
*
* @param loginUser login user
* @param targetProject target project
* @param jsonArray process task array
* @param loginUser login user
* @param targetProject target project
* @param jsonArray process task array
* @param subProcessIdMap correct sub process id map
*/
private void importSubProcess(User loginUser, Project targetProject, ArrayNode jsonArray, Map<Integer, Integer> subProcessIdMap) {
@ -1105,13 +1062,13 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
/**
* check the process definition node meets the specifications
*
* @param processData process data
* @param processData process data
* @param processDefinitionJson process definition json
* @return check result code
*/
public Map<String, Object> checkProcessNodeList(ProcessData processData, String processDefinitionJson) {
Map<String, Object> result = new HashMap<>(5);
Map<String, Object> result = new HashMap<>();
try {
if (processData == null) {
logger.error("process data is null");
@ -1206,7 +1163,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
for (String definitionId : idList) {
idIntList.add(Integer.parseInt(definitionId));
}
Integer[] idArray = idIntList.toArray(new Integer[idIntList.size()]);
Integer[] idArray = idIntList.toArray(new Integer[0]);
List<ProcessDefinition> processDefinitionList = processDefineMapper.queryDefinitionListByIdList(idArray);
if (CollectionUtils.isEmpty(processDefinitionList)) {
logger.info("process definition not exists");
@ -1237,7 +1194,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
*/
public Map<String, Object> queryProcessDefinitionAllByProjectId(Integer projectId) {
HashMap<String, Object> result = new HashMap<>(5);
HashMap<String, Object> result = new HashMap<>();
List<ProcessDefinition> resourceList = processDefineMapper.queryAllDefinitionList(projectId);
result.put(Constants.DATA_LIST, resourceList);
@ -1250,7 +1207,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
* Encapsulates the TreeView structure
*
* @param processId process definition id
* @param limit limit
* @param limit limit
* @return tree view json data
* @throws Exception exception
*/
@ -1474,17 +1431,17 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
/**
* batch copy process definition
*
* @param loginUser loginUser
* @param projectName projectName
* @param processDefinitionIds processDefinitionIds
* @param targetProjectId targetProjectId
* @return
*/
@Override
public Map<String, Object> batchCopyProcessDefinition(User loginUser,
String projectName,
String processDefinitionIds,
int targetProjectId){
int targetProjectId) {
Map<String, Object> result = new HashMap<>();
List<String> failedProcessList = new ArrayList<>();
@ -1500,12 +1457,12 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
}
Project targetProject = projectMapper.queryDetailById(targetProjectId);
if(targetProject == null){
if (targetProject == null) {
putMsg(result, Status.PROJECT_NOT_FOUNT, targetProjectId);
return result;
}
if(!(targetProject.getName()).equals(projectName)){
if (!(targetProject.getName()).equals(projectName)) {
Map<String, Object> checkTargetProjectResult = checkProjectAndAuth(loginUser, targetProject.getName());
if (checkTargetProjectResult != null) {
return checkTargetProjectResult;
@ -1515,18 +1472,18 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
String[] processDefinitionIdList = processDefinitionIds.split(Constants.COMMA);
doBatchCopyProcessDefinition(loginUser, targetProject, failedProcessList, processDefinitionIdList);
checkBatchOperateResult(projectName,targetProject.getName(),result,failedProcessList,true);
checkBatchOperateResult(projectName, targetProject.getName(), result, failedProcessList, true);
return result;
}
/**
* batch move process definition
*
* @param loginUser loginUser
* @param projectName projectName
* @param processDefinitionIds processDefinitionIds
* @param targetProjectId targetProjectId
* @return
*/
@Override
public Map<String, Object> batchMoveProcessDefinition(User loginUser,
@ -1546,14 +1503,14 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
putMsg(result, Status.PROCESS_DEFINITION_IDS_IS_EMPTY, processDefinitionIds);
return result;
}
Project targetProject = projectMapper.queryDetailById(targetProjectId);
if(targetProject == null){
if (targetProject == null) {
putMsg(result, Status.PROJECT_NOT_FOUNT, targetProjectId);
return result;
}
if(!(targetProject.getName()).equals(projectName)){
if (!(targetProject.getName()).equals(projectName)) {
Map<String, Object> checkTargetProjectResult = checkProjectAndAuth(loginUser, targetProject.getName());
if (checkTargetProjectResult != null) {
return checkTargetProjectResult;
@ -1563,22 +1520,23 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
String[] processDefinitionIdList = processDefinitionIds.split(Constants.COMMA);
doBatchMoveProcessDefinition(targetProject, failedProcessList, processDefinitionIdList);
checkBatchOperateResult(projectName,targetProject.getName(),result,failedProcessList,false);
checkBatchOperateResult(projectName, targetProject.getName(), result, failedProcessList, false);
return result;
}
/**
* do batch move process definition
*
* @param targetProject targetProject
* @param failedProcessList failedProcessList
* @param processDefinitionIdList processDefinitionIdList
*/
private void doBatchMoveProcessDefinition(Project targetProject, List<String> failedProcessList, String[] processDefinitionIdList) {
for(String processDefinitionId:processDefinitionIdList){
for (String processDefinitionId : processDefinitionIdList) {
try {
Map<String, Object> moveProcessDefinitionResult =
moveProcessDefinition(Integer.valueOf(processDefinitionId),targetProject);
moveProcessDefinition(Integer.valueOf(processDefinitionId), targetProject);
if (!Status.SUCCESS.equals(moveProcessDefinitionResult.get(Constants.STATUS))) {
setFailedProcessList(failedProcessList, processDefinitionId);
logger.error((String) moveProcessDefinitionResult.get(Constants.MSG));
@ -1591,16 +1549,17 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
/**
* batch copy process definition
*
* @param loginUser loginUser
* @param targetProject targetProject
* @param failedProcessList failedProcessList
* @param processDefinitionIdList processDefinitionIdList
*/
private void doBatchCopyProcessDefinition(User loginUser, Project targetProject, List<String> failedProcessList, String[] processDefinitionIdList) {
for(String processDefinitionId:processDefinitionIdList){
for (String processDefinitionId : processDefinitionIdList) {
try {
Map<String, Object> copyProcessDefinitionResult =
copyProcessDefinition(loginUser,Integer.valueOf(processDefinitionId),targetProject);
copyProcessDefinition(loginUser, Integer.valueOf(processDefinitionId), targetProject);
if (!Status.SUCCESS.equals(copyProcessDefinitionResult.get(Constants.STATUS))) {
setFailedProcessList(failedProcessList, processDefinitionId);
logger.error((String) copyProcessDefinitionResult.get(Constants.MSG));
@ -1613,23 +1572,24 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
/**
* set failed processList
*
* @param failedProcessList failedProcessList
* @param processDefinitionId processDefinitionId
*/
private void setFailedProcessList(List<String> failedProcessList, String processDefinitionId) {
ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(Integer.valueOf(processDefinitionId));
if(processDefinition != null){
failedProcessList.add(processDefinitionId+"["+processDefinition.getName()+"]");
}else{
failedProcessList.add(processDefinitionId+"[null]");
if (processDefinition != null) {
failedProcessList.add(processDefinitionId + "[" + processDefinition.getName() + "]");
} else {
failedProcessList.add(processDefinitionId + "[null]");
}
}
/**
* check project and auth
*
* @param loginUser loginUser
* @param projectName projectName
* @return
*/
private Map<String, Object> checkProjectAndAuth(User loginUser, String projectName) {
Project project = projectMapper.queryByName(projectName);
@ -1646,6 +1606,7 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
/**
* move process definition
*
* @param processId processId
* @param targetProject targetProject
* @return move result code
@ -1673,19 +1634,20 @@ public class ProcessDefinitionServiceImpl extends BaseService implements
/**
* check batch operate result
*
* @param srcProjectName srcProjectName
* @param targetProjectName targetProjectName
* @param result result
* @param failedProcessList failedProcessList
* @param isCopy isCopy
*/
private void checkBatchOperateResult(String srcProjectName,String targetProjectName,
Map<String, Object> result, List<String> failedProcessList,boolean isCopy) {
private void checkBatchOperateResult(String srcProjectName, String targetProjectName,
Map<String, Object> result, List<String> failedProcessList, boolean isCopy) {
if (!failedProcessList.isEmpty()) {
if(isCopy){
putMsg(result, Status.COPY_PROCESS_DEFINITION_ERROR, srcProjectName, targetProjectName,String.join(",", failedProcessList));
}else{
putMsg(result, Status.MOVE_PROCESS_DEFINITION_ERROR, srcProjectName, targetProjectName,String.join(",", failedProcessList));
if (isCopy) {
putMsg(result, Status.COPY_PROCESS_DEFINITION_ERROR, srcProjectName, targetProjectName, String.join(",", failedProcessList));
} else {
putMsg(result, Status.MOVE_PROCESS_DEFINITION_ERROR, srcProjectName, targetProjectName, String.join(",", failedProcessList));
}
} else {
putMsg(result, Status.SUCCESS);
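
Several hunks in ProcessDefinitionServiceImpl replace the raw Page with a typed Page<ProcessDefinition> and a PageInfo<ProcessDefinition>. A rough, self-contained sketch of that pagination glue, with the MyBatis-Plus page filled by hand instead of a real mapper call and a simplified stand-in for the project's PageInfo, could look like this:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;

public class PagingSketch {

    // Simplified stand-in for the project's PageInfo<T> holder.
    static class SimplePageInfo<T> {
        int totalCount;
        List<T> lists = new ArrayList<>();
    }

    public static void main(String[] args) {
        int pageNo = 1;
        int pageSize = 10;

        // Typed Page<> instead of the raw Page used before the change.
        Page<String> page = new Page<>(pageNo, pageSize);
        // In the service this would come back from the paging mapper query;
        // here it is filled by hand so the sketch runs on its own.
        page.setTotal(2);
        page.setRecords(Arrays.asList("process-a", "process-b"));
        IPage<String> result = page;

        // Copy the MyBatis-Plus page into the API-layer holder,
        // mirroring pageInfo.setTotalCount(...) / pageInfo.setLists(...) above.
        SimplePageInfo<String> pageInfo = new SimplePageInfo<>();
        pageInfo.totalCount = (int) result.getTotal();
        pageInfo.lists = result.getRecords();

        System.out.println(pageInfo.totalCount + " definitions: " + pageInfo.lists);
    }
}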

dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/ProcessDefinitionServiceTest.java (548)

@ -30,6 +30,7 @@ import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.FileUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.ProcessData;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
@ -38,7 +39,6 @@ import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
@ -68,14 +68,14 @@ import org.mockito.junit.MockitoJUnitRunner;
import org.springframework.mock.web.MockMultipartFile;
import org.springframework.web.multipart.MultipartFile;
@RunWith(MockitoJUnitRunner.Silent.class)
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
@RunWith(MockitoJUnitRunner.class)
public class ProcessDefinitionServiceTest {
@InjectMocks
ProcessDefinitionServiceImpl processDefinitionService;
@Mock
private DataSourceMapper dataSourceMapper;
private ProcessDefinitionServiceImpl processDefinitionService;
@Mock
private ProcessDefinitionMapper processDefineMapper;
@ -98,28 +98,149 @@ public class ProcessDefinitionServiceTest {
@Mock
private TaskInstanceMapper taskInstanceMapper;
private String sqlDependentJson = "{\"globalParams\":[]," +
"\"tasks\":[{\"type\":\"SQL\",\"id\":\"tasks-27297\",\"name\":\"sql\"," +
"\"params\":{\"type\":\"MYSQL\",\"datasource\":1,\"sql\":\"select * from test\"," +
"\"udfs\":\"\",\"sqlType\":\"1\",\"title\":\"\",\"receivers\":\"\",\"receiversCc\":\"\",\"showType\":\"TABLE\"" +
",\"localParams\":[],\"connParams\":\"\"," +
"\"preStatements\":[],\"postStatements\":[]}," +
"\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\"," +
"\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\"," +
"\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1," +
"\"preTasks\":[\"dependent\"]},{\"type\":\"DEPENDENT\",\"id\":\"tasks-33787\"," +
"\"name\":\"dependent\",\"params\":{},\"description\":\"\",\"runFlag\":\"NORMAL\"," +
"\"dependence\":{\"relation\":\"AND\",\"dependTaskList\":[{\"relation\":\"AND\"," +
"\"dependItemList\":[{\"projectId\":2,\"definitionId\":46,\"depTasks\":\"ALL\"," +
"\"cycle\":\"day\",\"dateValue\":\"today\"}]}]},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\"," +
"\"timeout\":{\"strategy\":\"\",\"enable\":false},\"taskInstancePriority\":\"MEDIUM\"," +
"\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}";
private String shellJson = "{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-9527\",\"name\":\"shell-1\"," +
"\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"#!/bin/bash\\necho \\\"shell-1\\\"\"}," +
"\"description\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\"," +
"\"timeout\":{\"strategy\":\"\",\"interval\":1,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\"," +
"\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}";
private static final String SHELL_JSON = "{\n"
+ " \"globalParams\": [\n"
+ " \n"
+ " ],\n"
+ " \"tasks\": [\n"
+ " {\n"
+ " \"type\": \"SHELL\",\n"
+ " \"id\": \"tasks-9527\",\n"
+ " \"name\": \"shell-1\",\n"
+ " \"params\": {\n"
+ " \"resourceList\": [\n"
+ " \n"
+ " ],\n"
+ " \"localParams\": [\n"
+ " \n"
+ " ],\n"
+ " \"rawScript\": \"#!/bin/bash\\necho \\\"shell-1\\\"\"\n"
+ " },\n"
+ " \"description\": \"\",\n"
+ " \"runFlag\": \"NORMAL\",\n"
+ " \"dependence\": {\n"
+ " \n"
+ " },\n"
+ " \"maxRetryTimes\": \"0\",\n"
+ " \"retryInterval\": \"1\",\n"
+ " \"timeout\": {\n"
+ " \"strategy\": \"\",\n"
+ " \"interval\": 1,\n"
+ " \"enable\": false\n"
+ " },\n"
+ " \"taskInstancePriority\": \"MEDIUM\",\n"
+ " \"workerGroupId\": -1,\n"
+ " \"preTasks\": [\n"
+ " \n"
+ " ]\n"
+ " }\n"
+ " ],\n"
+ " \"tenantId\": 1,\n"
+ " \"timeout\": 0\n"
+ "}";
private static final String CYCLE_SHELL_JSON = "{\n"
+ " \"globalParams\": [\n"
+ " \n"
+ " ],\n"
+ " \"tasks\": [\n"
+ " {\n"
+ " \"type\": \"SHELL\",\n"
+ " \"id\": \"tasks-9527\",\n"
+ " \"name\": \"shell-1\",\n"
+ " \"params\": {\n"
+ " \"resourceList\": [\n"
+ " \n"
+ " ],\n"
+ " \"localParams\": [\n"
+ " \n"
+ " ],\n"
+ " \"rawScript\": \"#!/bin/bash\\necho \\\"shell-1\\\"\"\n"
+ " },\n"
+ " \"description\": \"\",\n"
+ " \"runFlag\": \"NORMAL\",\n"
+ " \"dependence\": {\n"
+ " \n"
+ " },\n"
+ " \"maxRetryTimes\": \"0\",\n"
+ " \"retryInterval\": \"1\",\n"
+ " \"timeout\": {\n"
+ " \"strategy\": \"\",\n"
+ " \"interval\": 1,\n"
+ " \"enable\": false\n"
+ " },\n"
+ " \"taskInstancePriority\": \"MEDIUM\",\n"
+ " \"workerGroupId\": -1,\n"
+ " \"preTasks\": [\n"
+ " \"tasks-9529\"\n"
+ " ]\n"
+ " },\n"
+ " {\n"
+ " \"type\": \"SHELL\",\n"
+ " \"id\": \"tasks-9528\",\n"
+ " \"name\": \"shell-1\",\n"
+ " \"params\": {\n"
+ " \"resourceList\": [\n"
+ " \n"
+ " ],\n"
+ " \"localParams\": [\n"
+ " \n"
+ " ],\n"
+ " \"rawScript\": \"#!/bin/bash\\necho \\\"shell-1\\\"\"\n"
+ " },\n"
+ " \"description\": \"\",\n"
+ " \"runFlag\": \"NORMAL\",\n"
+ " \"dependence\": {\n"
+ " \n"
+ " },\n"
+ " \"maxRetryTimes\": \"0\",\n"
+ " \"retryInterval\": \"1\",\n"
+ " \"timeout\": {\n"
+ " \"strategy\": \"\",\n"
+ " \"interval\": 1,\n"
+ " \"enable\": false\n"
+ " },\n"
+ " \"taskInstancePriority\": \"MEDIUM\",\n"
+ " \"workerGroupId\": -1,\n"
+ " \"preTasks\": [\n"
+ " \"tasks-9527\"\n"
+ " ]\n"
+ " },\n"
+ " {\n"
+ " \"type\": \"SHELL\",\n"
+ " \"id\": \"tasks-9529\",\n"
+ " \"name\": \"shell-1\",\n"
+ " \"params\": {\n"
+ " \"resourceList\": [\n"
+ " \n"
+ " ],\n"
+ " \"localParams\": [\n"
+ " \n"
+ " ],\n"
+ " \"rawScript\": \"#!/bin/bash\\necho \\\"shell-1\\\"\"\n"
+ " },\n"
+ " \"description\": \"\",\n"
+ " \"runFlag\": \"NORMAL\",\n"
+ " \"dependence\": {\n"
+ " \n"
+ " },\n"
+ " \"maxRetryTimes\": \"0\",\n"
+ " \"retryInterval\": \"1\",\n"
+ " \"timeout\": {\n"
+ " \"strategy\": \"\",\n"
+ " \"interval\": 1,\n"
+ " \"enable\": false\n"
+ " },\n"
+ " \"taskInstancePriority\": \"MEDIUM\",\n"
+ " \"workerGroupId\": -1,\n"
+ " \"preTasks\": [\n"
+ " \"tasks-9528\"\n"
+ " ]\n"
+ " }\n"
+ " ],\n"
+ " \"tenantId\": 1,\n"
+ " \"timeout\": 0\n"
+ "}";
@Test
public void testQueryProcessDefinitionList() {
@ -150,6 +271,7 @@ public class ProcessDefinitionServiceTest {
}
@Test
@SuppressWarnings("unchecked")
public void testQueryProcessDefinitionListPaging() {
String projectName = "project_test1";
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
@ -168,6 +290,22 @@ public class ProcessDefinitionServiceTest {
Map<String, Object> map = processDefinitionService.queryProcessDefinitionListPaging(loginUser, "project_test1", "", 1, 5, 0);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS));
putMsg(result, Status.SUCCESS, projectName);
loginUser.setId(1);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Page<ProcessDefinition> page = new Page<>(1, 10);
page.setTotal(30);
Mockito.when(processDefineMapper.queryDefineListPaging(
Mockito.any(IPage.class)
, Mockito.eq("")
, Mockito.eq(loginUser.getId())
, Mockito.eq(project.getId())
, Mockito.anyBoolean())).thenReturn(page);
Map<String, Object> map1 = processDefinitionService.queryProcessDefinitionListPaging(
loginUser, projectName, "", 1, 10, loginUser.getId());
Assert.assertEquals(Status.SUCCESS, map1.get(Constants.STATUS));
}
@Test
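
The added assertions in testQueryProcessDefinitionListPaging stub the paging mapper with Mockito matchers. A stripped-down sketch of the same stubbing pattern, against a hypothetical mapper interface rather than the real ProcessDefinitionMapper, is shown below:

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.Arrays;

import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;

public class PagingStubSketch {

    // Hypothetical mapper interface standing in for ProcessDefinitionMapper.
    interface DefinitionMapper {
        IPage<String> queryDefineListPaging(IPage<String> page, String searchVal,
                                            int userId, int projectId, boolean isAdmin);
    }

    public static void main(String[] args) {
        DefinitionMapper mapper = mock(DefinitionMapper.class);

        Page<String> page = new Page<>(1, 10);
        page.setTotal(30);
        page.setRecords(Arrays.asList("shell-1", "shell-2"));

        // Matchers mirror the stubbing in the test hunk above:
        // any page object, empty search value, fixed user/project ids, any admin flag.
        when(mapper.queryDefineListPaging(any(), eq(""), eq(1), eq(1), anyBoolean()))
                .thenReturn(page);

        IPage<String> result = mapper.queryDefineListPaging(new Page<>(1, 10), "", 1, 1, false);
        System.out.println(result.getTotal() + " -> " + result.getRecords());
    }
}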
@ -206,55 +344,72 @@ public class ProcessDefinitionServiceTest {
}
@Test
public void testCopyProcessDefinition() throws Exception {
String projectName = "project_test1";
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
Mockito.when(projectMapper.queryDetailById(1)).thenReturn(getProject(projectName));
public void testBatchCopyProcessDefinition() {
String projectName = "project_test1";
Project project = getProject(projectName);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setUserType(UserType.GENERAL_USER);
// copy project definition ids empty test
Map<String, Object> map = processDefinitionService.batchCopyProcessDefinition(loginUser, projectName, StringUtils.EMPTY, 0);
Assert.assertEquals(Status.PROCESS_DEFINITION_IDS_IS_EMPTY, map.get(Constants.STATUS));
Map<String, Object> result = new HashMap<>();
//project check auth success, instance not exist
// project check auth fail
putMsg(result, Status.PROJECT_NOT_FOUNT, projectName);
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Map<String, Object> map1 = processDefinitionService.batchCopyProcessDefinition(
loginUser, projectName, String.valueOf(project.getId()), 0);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map1.get(Constants.STATUS));
// project check auth success, target project is null
putMsg(result, Status.SUCCESS, projectName);
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Mockito.when(projectMapper.queryDetailById(0)).thenReturn(null);
Map<String, Object> map2 = processDefinitionService.batchCopyProcessDefinition(
loginUser, projectName, String.valueOf(project.getId()), 0);
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map2.get(Constants.STATUS));
// project check auth success, target project name not equal project name, check auth target project fail
Project project1 = getProject(projectName);
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(project1);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
putMsg(result, Status.SUCCESS, projectName);
String projectName2 = "project_test2";
Project project2 = getProject(projectName2);
Mockito.when(projectMapper.queryByName(projectName2)).thenReturn(project2);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project2, projectName2)).thenReturn(result);
Mockito.when(projectMapper.queryDetailById(1)).thenReturn(project2);
// instance exit
ProcessDefinition definition = getProcessDefinition();
definition.setLocations("{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}");
definition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}");
definition.setConnects("[]");
//instance exit
Mockito.when(processDefineMapper.selectById(46)).thenReturn(definition);
Map<String, Object> createProcessResult = new HashMap<>();
putMsg(result, Status.SUCCESS);
Mockito.when(processDefinitionService.createProcessDefinition(
loginUser,
definition.getProjectName(),
definition.getName(),
definition.getProcessDefinitionJson(),
definition.getDescription(),
definition.getLocations(),
definition.getConnects())).thenReturn(createProcessResult);
Mockito.when(processDefineMapper.selectById(46)).thenReturn(definition);
Map<String, Object> successRes = processDefinitionService.batchCopyProcessDefinition(loginUser, "project_test1",
"46", 1);
Map<String, Object> map3 = processDefinitionService.batchCopyProcessDefinition(
loginUser, projectName, "46", 1);
Assert.assertEquals(Status.SUCCESS, map3.get(Constants.STATUS));
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@Test
public void testBatchMoveProcessDefinition() throws Exception {
public void testBatchMoveProcessDefinition() {
String projectName = "project_test1";
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
Project project1 = getProject(projectName);
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(project1);
String projectName2 = "project_test2";
Mockito.when(projectMapper.queryByName(projectName2)).thenReturn(getProject(projectName2));
Project project2 = getProject(projectName2);
Mockito.when(projectMapper.queryByName(projectName2)).thenReturn(project2);
int targetProjectId = 2;
Mockito.when(projectMapper.queryDetailById(targetProjectId)).thenReturn(getProjectById(targetProjectId));
@ -273,27 +428,27 @@ public class ProcessDefinitionServiceTest {
putMsg(result2, Status.SUCCESS, targetProject.getName());
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Mockito.when(projectService.checkProjectAndAuth(loginUser, targetProject, targetProject.getName())).thenReturn(result2);
Mockito.when(projectService.checkProjectAndAuth(loginUser, project2, projectName2)).thenReturn(result);
ProcessDefinition definition = getProcessDefinition();
definition.setLocations("{\"tasks-36196\":{\"name\":\"ssh_test1\",\"targetarr\":\"\",\"x\":141,\"y\":70}}");
definition.setProcessDefinitionJson("{\"globalParams\":[],\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-36196\",\"name\":\"ssh_test1\",\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"aa=\\\"1234\\\"\\necho ${aa}\"},\"desc\":\"\",\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\",\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":-1,\"preTasks\":[]}],\"tenantId\":-1,\"timeout\":0}");
definition.setConnects("[]");
//instance exit
// check target project result == null
Mockito.when(processDefineMapper.updateById(definition)).thenReturn(46);
Mockito.when(processDefineMapper.selectById(46)).thenReturn(definition);
putMsg(result, Status.SUCCESS);
Map<String, Object> successRes = processDefinitionService.batchMoveProcessDefinition(loginUser, "project_test1",
"46", 2);
Map<String, Object> successRes = processDefinitionService.batchMoveProcessDefinition(
loginUser, "project_test1", "46", 2);
Assert.assertEquals(Status.SUCCESS, successRes.get(Constants.STATUS));
}
@Test
public void deleteProcessDefinitionByIdTest() throws Exception {
public void deleteProcessDefinitionByIdTest() {
String projectName = "project_test1";
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
@ -379,7 +534,7 @@ public class ProcessDefinitionServiceTest {
Project project = getProject(projectName);
User loginUser = new User();
loginUser.setId(-1);
loginUser.setId(1);
loginUser.setUserType(UserType.GENERAL_USER);
//project check auth fail
@ -390,17 +545,25 @@ public class ProcessDefinitionServiceTest {
6, ReleaseState.OFFLINE.getCode());
Assert.assertEquals(Status.PROJECT_NOT_FOUNT, map.get(Constants.STATUS));
//project check auth success, processs definition online
// project check auth success, processs definition online
putMsg(result, Status.SUCCESS, projectName);
Mockito.when(processDefineMapper.selectById(46)).thenReturn(getProcessDefinition());
Mockito.when(processDefineMapper.updateById(getProcessDefinition())).thenReturn(1);
Map<String, Object> onlineRes = processDefinitionService.releaseProcessDefinition(loginUser, "project_test1",
46, ReleaseState.ONLINE.getCode());
Map<String, Object> onlineRes = processDefinitionService.releaseProcessDefinition(
loginUser, "project_test1", 46, ReleaseState.ONLINE.getCode());
Assert.assertEquals(Status.SUCCESS, onlineRes.get(Constants.STATUS));
//release error code
Map<String, Object> failRes = processDefinitionService.releaseProcessDefinition(loginUser, "project_test1",
46, 2);
// project check auth success, processs definition online
ProcessDefinition processDefinition1 = getProcessDefinition();
processDefinition1.setResourceIds("1,2");
Mockito.when(processDefineMapper.selectById(46)).thenReturn(processDefinition1);
Mockito.when(processService.getUserById(1)).thenReturn(loginUser);
Map<String, Object> onlineWithResourceRes = processDefinitionService.releaseProcessDefinition(
loginUser, "project_test1", 46, ReleaseState.ONLINE.getCode());
Assert.assertEquals(Status.SUCCESS, onlineWithResourceRes.get(Constants.STATUS));
// release error code
Map<String, Object> failRes = processDefinitionService.releaseProcessDefinition(
loginUser, "project_test1", 46, 2);
Assert.assertEquals(Status.REQUEST_PARAMS_NOT_VALID_ERROR, failRes.get(Constants.STATUS));
//FIXME has function exit code 1 when exception
@ -453,18 +616,24 @@ public class ProcessDefinitionServiceTest {
Map<String, Object> dataNotValidRes = processDefinitionService.checkProcessNodeList(null, "");
Assert.assertEquals(Status.DATA_IS_NOT_VALID, dataNotValidRes.get(Constants.STATUS));
//task not empty
String processDefinitionJson = shellJson;
// task not empty
String processDefinitionJson = SHELL_JSON;
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class);
assert processData != null;
Assert.assertNotNull(processData);
Map<String, Object> taskEmptyRes = processDefinitionService.checkProcessNodeList(processData, processDefinitionJson);
Assert.assertEquals(Status.SUCCESS, taskEmptyRes.get(Constants.STATUS));
//task empty
// task empty
processData.setTasks(null);
Map<String, Object> taskNotEmptyRes = processDefinitionService.checkProcessNodeList(processData, processDefinitionJson);
Assert.assertEquals(Status.DATA_IS_NULL, taskNotEmptyRes.get(Constants.STATUS));
// task cycle
String processDefinitionJsonCycle = CYCLE_SHELL_JSON;
ProcessData processDataCycle = JSONUtils.parseObject(processDefinitionJsonCycle, ProcessData.class);
Map<String, Object> taskCycleRes = processDefinitionService.checkProcessNodeList(processDataCycle, processDefinitionJsonCycle);
Assert.assertEquals(Status.PROCESS_NODE_HAS_CYCLE, taskCycleRes.get(Constants.STATUS));
//json abnormal
String abnormalJson = processDefinitionJson.replaceAll("SHELL", "");
processData = JSONUtils.parseObject(abnormalJson, ProcessData.class);
@ -473,7 +642,7 @@ public class ProcessDefinitionServiceTest {
}
@Test
public void testGetTaskNodeListByDefinitionId() throws Exception {
public void testGetTaskNodeListByDefinitionId() {
//process definition not exist
Mockito.when(processDefineMapper.selectById(46)).thenReturn(null);
Map<String, Object> processDefinitionNullRes = processDefinitionService.getTaskNodeListByDefinitionId(46);
@ -486,14 +655,14 @@ public class ProcessDefinitionServiceTest {
Assert.assertEquals(Status.DATA_IS_NOT_VALID, successRes.get(Constants.STATUS));
//success
processDefinition.setProcessDefinitionJson(shellJson);
processDefinition.setProcessDefinitionJson(SHELL_JSON);
Mockito.when(processDefineMapper.selectById(46)).thenReturn(processDefinition);
Map<String, Object> dataNotValidRes = processDefinitionService.getTaskNodeListByDefinitionId(46);
Assert.assertEquals(Status.SUCCESS, dataNotValidRes.get(Constants.STATUS));
}
@Test
public void testGetTaskNodeListByDefinitionIdList() throws Exception {
public void testGetTaskNodeListByDefinitionIdList() {
//process definition not exist
String defineIdList = "46";
Integer[] idArray = {46};
@ -503,7 +672,7 @@ public class ProcessDefinitionServiceTest {
//process definition exist
ProcessDefinition processDefinition = getProcessDefinition();
processDefinition.setProcessDefinitionJson(shellJson);
processDefinition.setProcessDefinitionJson(SHELL_JSON);
List<ProcessDefinition> processDefinitionList = new ArrayList<>();
processDefinitionList.add(processDefinition);
Mockito.when(processDefineMapper.queryDefinitionListByIdList(idArray)).thenReturn(processDefinitionList);
@ -515,7 +684,7 @@ public class ProcessDefinitionServiceTest {
public void testQueryProcessDefinitionAllByProjectId() {
int projectId = 1;
ProcessDefinition processDefinition = getProcessDefinition();
processDefinition.setProcessDefinitionJson(shellJson);
processDefinition.setProcessDefinitionJson(SHELL_JSON);
List<ProcessDefinition> processDefinitionList = new ArrayList<>();
processDefinitionList.add(processDefinition);
Mockito.when(processDefineMapper.queryAllDefinitionList(projectId)).thenReturn(processDefinitionList);
@ -527,7 +696,7 @@ public class ProcessDefinitionServiceTest {
public void testViewTree() throws Exception {
//process definition not exist
ProcessDefinition processDefinition = getProcessDefinition();
processDefinition.setProcessDefinitionJson(shellJson);
processDefinition.setProcessDefinitionJson(SHELL_JSON);
Mockito.when(processDefineMapper.selectById(46)).thenReturn(null);
Map<String, Object> processDefinitionNullRes = processDefinitionService.viewTree(46, 10);
Assert.assertEquals(Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionNullRes.get(Constants.STATUS));
@ -567,29 +736,71 @@ public class ProcessDefinitionServiceTest {
@Test
public void testImportProcessDefinitionById() throws IOException {
String processJson = "[{\"projectName\":\"testProject\",\"processDefinitionName\":\"shell-4\"," +
"\"processDefinitionJson\":\"{\\\"tenantId\\\":1,\\\"globalParams\\\":[]," +
"\\\"tasks\\\":[{\\\"workerGroupId\\\":\\\"default\\\",\\\"description\\\":\\\"\\\",\\\"runFlag\\\":\\\"NORMAL\\\"," +
"\\\"type\\\":\\\"SHELL\\\",\\\"params\\\":{\\\"rawScript\\\":\\\"#!/bin/bash\\\\necho \\\\\\\"shell-4\\\\\\\"\\\"," +
"\\\"localParams\\\":[],\\\"resourceList\\\":[]},\\\"timeout\\\":{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}," +
"\\\"maxRetryTimes\\\":\\\"0\\\",\\\"taskInstancePriority\\\":\\\"MEDIUM\\\",\\\"name\\\":\\\"shell-4\\\"," +
"\\\"dependence\\\":{},\\\"retryInterval\\\":\\\"1\\\",\\\"preTasks\\\":[],\\\"id\\\":\\\"tasks-84090\\\"}," +
"{\\\"taskInstancePriority\\\":\\\"MEDIUM\\\",\\\"name\\\":\\\"shell-5\\\",\\\"workerGroupId\\\":\\\"default\\\\," +
"\\\"description\\\":\\\"\\\",\\\"dependence\\\":{},\\\"preTasks\\\":[\\\"shell-4\\\"],\\\"id\\\":\\\"tasks-87364\\\"," +
"\\\"runFlag\\\":\\\"NORMAL\\\",\\\"type\\\":\\\"SUB_PROCESS\\\",\\\"params\\\":{\\\"processDefinitionId\\\":46}," +
"\\\"timeout\\\":{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"}}],\\\"timeout\\\":0}\"," +
"\"processDefinitionDescription\":\"\",\"processDefinitionLocations\":\"{\\\"tasks-84090\\\":{\\\"name\\\":\\\"shell-4\\\"," +
"\\\"targetarr\\\":\\\"\\\",\\\"x\\\":128,\\\"y\\\":114},\\\"tasks-87364\\\":{\\\"name\\\":\\\"shell-5\\\"," +
"\\\"targetarr\\\":\\\"tasks-84090\\\",\\\"x\\\":266,\\\"y\\\":115}}\"," +
"\"processDefinitionConnects\":\"[{\\\"endPointSourceId\\\":\\\"tasks-84090\\\"," +
"\\\"endPointTargetId\\\":\\\"tasks-87364\\\"}]\"}]";
String subProcessJson = "{\"globalParams\":[]," +
"\"tasks\":[{\"type\":\"SHELL\",\"id\":\"tasks-52423\",\"name\":\"shell-5\"," +
"\"params\":{\"resourceList\":[],\"localParams\":[],\"rawScript\":\"echo \\\"shell-5\\\"\"},\"description\":\"\"," +
"\"runFlag\":\"NORMAL\",\"dependence\":{},\"maxRetryTimes\":\"0\",\"retryInterval\":\"1\"," +
"\"timeout\":{\"strategy\":\"\",\"interval\":null,\"enable\":false},\"taskInstancePriority\":\"MEDIUM\",\"workerGroupId\":\\\"default\\\\," +
"\"preTasks\":[]}],\"tenantId\":1,\"timeout\":0}";
String processJson = "[\n"
+ " {\n"
+ " \"projectName\": \"testProject\",\n"
+ " \"processDefinitionName\": \"shell-4\",\n"
+ " \"processDefinitionJson\": \"{\\\"tenantId\\\":1"
+ ",\\\"globalParams\\\":[],\\\"tasks\\\":[{\\\"workerGroupId\\\":\\\"3\\\",\\\"description\\\""
+ ":\\\"\\\",\\\"runFlag\\\":\\\"NORMAL\\\",\\\"type\\\":\\\"SHELL\\\",\\\"params\\\":{\\\"rawScript\\\""
+ ":\\\"#!/bin/bash\\\\necho \\\\\\\"shell-4\\\\\\\"\\\",\\\"localParams\\\":[],\\\"resourceList\\\":[]}"
+ ",\\\"timeout\\\":{\\\"enable\\\":false,\\\"strategy\\\":\\\"\\\"},\\\"maxRetryTimes\\\":\\\"0\\\""
+ ",\\\"taskInstancePriority\\\":\\\"MEDIUM\\\",\\\"name\\\":\\\"shell-4\\\",\\\"dependence\\\":{}"
+ ",\\\"retryInterval\\\":\\\"1\\\",\\\"preTasks\\\":[],\\\"id\\\":\\\"tasks-84090\\\"}"
+ ",{\\\"taskInstancePriority\\\":\\\"MEDIUM\\\",\\\"name\\\":\\\"shell-5\\\",\\\"workerGroupId\\\""
+ ":\\\"3\\\",\\\"description\\\":\\\"\\\",\\\"dependence\\\":{},\\\"preTasks\\\":[\\\"shell-4\\\"]"
+ ",\\\"id\\\":\\\"tasks-87364\\\",\\\"runFlag\\\":\\\"NORMAL\\\",\\\"type\\\":\\\"SUB_PROCESS\\\""
+ ",\\\"params\\\":{\\\"processDefinitionId\\\":46},\\\"timeout\\\":{\\\"enable\\\":false"
+ ",\\\"strategy\\\":\\\"\\\"}}],\\\"timeout\\\":0}\",\n"
+ " \"processDefinitionDescription\": \"\",\n"
+ " \"processDefinitionLocations\": \"{\\\"tasks-84090\\\":{\\\"name\\\":\\\"shell-4\\\""
+ ",\\\"targetarr\\\":\\\"\\\",\\\"x\\\":128,\\\"y\\\":114},\\\"tasks-87364\\\":{\\\"name\\\""
+ ":\\\"shell-5\\\",\\\"targetarr\\\":\\\"tasks-84090\\\",\\\"x\\\":266,\\\"y\\\":115}}\",\n"
+ " \"processDefinitionConnects\": \"[{\\\"endPointSourceId\\\":\\\"tasks-84090\\\""
+ ",\\\"endPointTargetId\\\":\\\"tasks-87364\\\"}]\"\n"
+ " }\n"
+ "]";
String subProcessJson = "{\n"
+ " \"globalParams\": [\n"
+ " \n"
+ " ],\n"
+ " \"tasks\": [\n"
+ " {\n"
+ " \"type\": \"SHELL\",\n"
+ " \"id\": \"tasks-52423\",\n"
+ " \"name\": \"shell-5\",\n"
+ " \"params\": {\n"
+ " \"resourceList\": [\n"
+ " \n"
+ " ],\n"
+ " \"localParams\": [\n"
+ " \n"
+ " ],\n"
+ " \"rawScript\": \"echo \\\"shell-5\\\"\"\n"
+ " },\n"
+ " \"description\": \"\",\n"
+ " \"runFlag\": \"NORMAL\",\n"
+ " \"dependence\": {\n"
+ " \n"
+ " },\n"
+ " \"maxRetryTimes\": \"0\",\n"
+ " \"retryInterval\": \"1\",\n"
+ " \"timeout\": {\n"
+ " \"strategy\": \"\",\n"
+ " \"interval\": null,\n"
+ " \"enable\": false\n"
+ " },\n"
+ " \"taskInstancePriority\": \"MEDIUM\",\n"
+ " \"workerGroupId\": \"3\",\n"
+ " \"preTasks\": [\n"
+ " \n"
+ " ]\n"
+ " }\n"
+ " ],\n"
+ " \"tenantId\": 1,\n"
+ " \"timeout\": 0\n"
+ "}";
FileUtils.writeStringToFile(new File("/tmp/task.json"), processJson);
@ -618,28 +829,13 @@ public class ProcessDefinitionServiceTest {
Mockito.when(projectService.checkProjectAndAuth(loginUser, getProject(currentProjectName), currentProjectName)).thenReturn(result);
Mockito.when(processDefineMapper.queryByDefineId(46)).thenReturn(shellDefinition2);
//import process
// Map<String, Object> importProcessResult = processDefinitionService.importProcessDefinition(loginUser, multipartFile, currentProjectName);
//
// Assert.assertEquals(Status.SUCCESS, importProcessResult.get(Constants.STATUS));
//
// boolean delete = file.delete();
//
// Assert.assertTrue(delete);
// String processMetaJson = "";
// improssProcessCheckData(file, loginUser, currentProjectName, processMetaJson);
//
// processMetaJson = "{\"scheduleWorkerGroupId\":-1}";
// improssProcessCheckData(file, loginUser, currentProjectName, processMetaJson);
//
// processMetaJson = "{\"scheduleWorkerGroupId\":-1,\"projectName\":\"test\"}";
// improssProcessCheckData(file, loginUser, currentProjectName, processMetaJson);
//
// processMetaJson = "{\"scheduleWorkerGroupId\":-1,\"projectName\":\"test\",\"processDefinitionName\":\"test_definition\"}";
// improssProcessCheckData(file, loginUser, currentProjectName, processMetaJson);
Map<String, Object> importProcessResult = processDefinitionService.importProcessDefinition(loginUser, multipartFile, currentProjectName);
Assert.assertEquals(Status.SUCCESS, importProcessResult.get(Constants.STATUS));
boolean delete = file.delete();
Assert.assertTrue(delete);
}
@Test
@ -658,12 +854,122 @@ public class ProcessDefinitionServiceTest {
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
Mockito.when(processService.findProcessDefineById(1)).thenReturn(getProcessDefinition());
String sqlDependentJson = "{\n"
+ " \"globalParams\": [\n"
+ " \n"
+ " ],\n"
+ " \"tasks\": [\n"
+ " {\n"
+ " \"type\": \"SQL\",\n"
+ " \"id\": \"tasks-27297\",\n"
+ " \"name\": \"sql\",\n"
+ " \"params\": {\n"
+ " \"type\": \"MYSQL\",\n"
+ " \"datasource\": 1,\n"
+ " \"sql\": \"select * from test\",\n"
+ " \"udfs\": \"\",\n"
+ " \"sqlType\": \"1\",\n"
+ " \"title\": \"\",\n"
+ " \"receivers\": \"\",\n"
+ " \"receiversCc\": \"\",\n"
+ " \"showType\": \"TABLE\",\n"
+ " \"localParams\": [\n"
+ " \n"
+ " ],\n"
+ " \"connParams\": \"\",\n"
+ " \"preStatements\": [\n"
+ " \n"
+ " ],\n"
+ " \"postStatements\": [\n"
+ " \n"
+ " ]\n"
+ " },\n"
+ " \"description\": \"\",\n"
+ " \"runFlag\": \"NORMAL\",\n"
+ " \"dependence\": {\n"
+ " \n"
+ " },\n"
+ " \"maxRetryTimes\": \"0\",\n"
+ " \"retryInterval\": \"1\",\n"
+ " \"timeout\": {\n"
+ " \"strategy\": \"\",\n"
+ " \"enable\": false\n"
+ " },\n"
+ " \"taskInstancePriority\": \"MEDIUM\",\n"
+ " \"workerGroupId\": -1,\n"
+ " \"preTasks\": [\n"
+ " \"dependent\"\n"
+ " ]\n"
+ " },\n"
+ " {\n"
+ " \"type\": \"DEPENDENT\",\n"
+ " \"id\": \"tasks-33787\",\n"
+ " \"name\": \"dependent\",\n"
+ " \"params\": {\n"
+ " \n"
+ " },\n"
+ " \"description\": \"\",\n"
+ " \"runFlag\": \"NORMAL\",\n"
+ " \"dependence\": {\n"
+ " \"relation\": \"AND\",\n"
+ " \"dependTaskList\": [\n"
+ " {\n"
+ " \"relation\": \"AND\",\n"
+ " \"dependItemList\": [\n"
+ " {\n"
+ " \"projectId\": 2,\n"
+ " \"definitionId\": 46,\n"
+ " \"depTasks\": \"ALL\",\n"
+ " \"cycle\": \"day\",\n"
+ " \"dateValue\": \"today\"\n"
+ " }\n"
+ " ]\n"
+ " }\n"
+ " ]\n"
+ " },\n"
+ " \"maxRetryTimes\": \"0\",\n"
+ " \"retryInterval\": \"1\",\n"
+ " \"timeout\": {\n"
+ " \"strategy\": \"\",\n"
+ " \"enable\": false\n"
+ " },\n"
+ " \"taskInstancePriority\": \"MEDIUM\",\n"
+ " \"workerGroupId\": -1,\n"
+ " \"preTasks\": [\n"
+ " \n"
+ " ]\n"
+ " }\n"
+ " ],\n"
+ " \"tenantId\": 1,\n"
+ " \"timeout\": 0\n"
+ "}";
Map<String, Object> updateResult = processDefinitionService.updateProcessDefinition(loginUser, projectName, 1, "test",
sqlDependentJson, "", "", "");
Assert.assertEquals(Status.UPDATE_PROCESS_DEFINITION_ERROR, updateResult.get(Constants.STATUS));
}
@Test
public void testBatchExportProcessDefinitionByIds() {
processDefinitionService.batchExportProcessDefinitionByIds(
null, null, null, null);
User loginUser = new User();
loginUser.setId(1);
loginUser.setUserType(UserType.ADMIN_USER);
String projectName = "project_test1";
Project project = getProject(projectName);
Map<String, Object> result = new HashMap<>();
putMsg(result, Status.PROJECT_NOT_FOUNT);
Mockito.when(projectMapper.queryByName(projectName)).thenReturn(getProject(projectName));
Mockito.when(projectService.checkProjectAndAuth(loginUser, project, projectName)).thenReturn(result);
processDefinitionService.batchExportProcessDefinitionByIds(
loginUser, projectName, "1", null);
}
/**
* get mock datasource
*
@ -715,7 +1021,7 @@ public class ProcessDefinitionServiceTest {
*/
private Project getProjectById(int projectId) {
Project project = new Project();
project.setId(1);
project.setId(projectId);
project.setName("project_test2");
project.setUserId(1);
return project;

17
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/Constants.java

@ -138,7 +138,7 @@ public final class Constants {
/**
* python home
*/
public static final String PYTHON_HOME="PYTHON_HOME";
public static final String PYTHON_HOME = "PYTHON_HOME";
/**
* resource.view.suffixs
@ -366,7 +366,6 @@ public final class Constants {
public static final double DEFAULT_WORKER_RESERVED_MEMORY = OSUtils.totalMemorySize() / 10;
/**
* default log cache rows num, output when the number is reached
*/
@ -752,7 +751,7 @@ public final class Constants {
/**
* preview schedule execute count
* preview schedule execute count
*/
public static final int PREVIEW_SCHEDULE_EXECUTE_COUNT = 5;
@ -832,6 +831,7 @@ public final class Constants {
public static final int[] NOT_TERMINATED_STATES = new int[]{
ExecutionStatus.SUBMITTED_SUCCESS.ordinal(),
ExecutionStatus.RUNNING_EXECUTION.ordinal(),
ExecutionStatus.DELAY_EXECUTION.ordinal(),
ExecutionStatus.READY_PAUSE.ordinal(),
ExecutionStatus.READY_STOP.ordinal(),
ExecutionStatus.NEED_FAULT_TOLERANCE.ordinal(),
@ -852,18 +852,17 @@ public final class Constants {
/**
* data total
*/
public static final String COUNT = "count";
public static final String COUNT = "count";
/**
* page size
*/
public static final String PAGE_SIZE = "pageSize";
public static final String PAGE_SIZE = "pageSize";
/**
* current page no
*/
public static final String PAGE_NUMBER = "pageNo";
public static final String PAGE_NUMBER = "pageNo";
/**
@ -966,11 +965,11 @@ public final class Constants {
/**
* authorize writable perm
*/
public static final int AUTHORIZE_WRITABLE_PERM=7;
public static final int AUTHORIZE_WRITABLE_PERM = 7;
/**
* authorize readable perm
*/
public static final int AUTHORIZE_READABLE_PERM=4;
public static final int AUTHORIZE_READABLE_PERM = 4;
/**

109
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/ExecutionStatus.java

@ -14,16 +14,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.enums;
import java.util.HashMap;
import com.baomidou.mybatisplus.annotation.EnumValue;
import java.util.HashMap;
/**
* running status for workflow and task nodes
*
*/
public enum ExecutionStatus {
@ -41,6 +40,7 @@ public enum ExecutionStatus {
* 9 kill
* 10 waiting thread
* 11 waiting depend node complete
* 12 delay execution
*/
SUBMITTED_SUCCESS(0, "submit success"),
RUNNING_EXECUTION(1, "running"),
@ -53,9 +53,10 @@ public enum ExecutionStatus {
NEED_FAULT_TOLERANCE(8, "need fault tolerance"),
KILL(9, "kill"),
WAITTING_THREAD(10, "waiting thread"),
WAITTING_DEPEND(11, "waiting depend node complete");
WAITTING_DEPEND(11, "waiting depend node complete"),
DELAY_EXECUTION(12, "delay execution");
ExecutionStatus(int code, String descp){
ExecutionStatus(int code, String descp) {
this.code = code;
this.descp = descp;
}
@ -64,77 +65,85 @@ public enum ExecutionStatus {
private final int code;
private final String descp;
private static HashMap<Integer, ExecutionStatus> EXECUTION_STATUS_MAP=new HashMap<>();
private static HashMap<Integer, ExecutionStatus> EXECUTION_STATUS_MAP = new HashMap<>();
static {
for (ExecutionStatus executionStatus:ExecutionStatus.values()){
EXECUTION_STATUS_MAP.put(executionStatus.code,executionStatus);
}
for (ExecutionStatus executionStatus : ExecutionStatus.values()) {
EXECUTION_STATUS_MAP.put(executionStatus.code, executionStatus);
}
}
/**
* status is success
* @return status
*/
public boolean typeIsSuccess(){
return this == SUCCESS;
}
/**
* status is failure
* @return status
*/
public boolean typeIsFailure(){
return this == FAILURE || this == NEED_FAULT_TOLERANCE || this == KILL;
}
/**
* status is finished
* @return status
*/
public boolean typeIsFinished(){
return typeIsSuccess() || typeIsFailure() || typeIsCancel() || typeIsPause()
|| typeIsStop();
}
/**
* status is success
*
* @return status
*/
public boolean typeIsSuccess() {
return this == SUCCESS;
}
/**
* status is failure
*
* @return status
*/
public boolean typeIsFailure() {
return this == FAILURE || this == NEED_FAULT_TOLERANCE || this == KILL;
}
/**
* status is finished
*
* @return status
*/
public boolean typeIsFinished() {
return typeIsSuccess() || typeIsFailure() || typeIsCancel() || typeIsPause()
|| typeIsStop();
}
/**
* status is waiting thread
*
* @return status
*/
public boolean typeIsWaitingThread(){
return this == WAITTING_THREAD;
}
public boolean typeIsWaitingThread() {
return this == WAITTING_THREAD;
}
/**
* status is pause
*
* @return status
*/
public boolean typeIsPause(){
return this == PAUSE;
}
public boolean typeIsPause() {
return this == PAUSE;
}
/**
* status is pause
*
* @return status
*/
public boolean typeIsStop(){
public boolean typeIsStop() {
return this == STOP;
}
/**
* status is running
*
* @return status
*/
public boolean typeIsRunning(){
return this == RUNNING_EXECUTION || this == WAITTING_DEPEND;
}
public boolean typeIsRunning() {
return this == RUNNING_EXECUTION || this == WAITTING_DEPEND || this == DELAY_EXECUTION;
}
/**
* status is cancel
*
* @return status
*/
public boolean typeIsCancel(){
return this == KILL || this == STOP ;
public boolean typeIsCancel() {
return this == KILL || this == STOP;
}
public int getCode() {
@ -145,10 +154,10 @@ public enum ExecutionStatus {
return descp;
}
public static ExecutionStatus of(int status){
if(EXECUTION_STATUS_MAP.containsKey(status)){
return EXECUTION_STATUS_MAP.get(status);
}
public static ExecutionStatus of(int status) {
if (EXECUTION_STATUS_MAP.containsKey(status)) {
return EXECUTION_STATUS_MAP.get(status);
}
throw new IllegalArgumentException("invalid status : " + status);
}
}
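A minimal sketch of the new DELAY_EXECUTION state in use, relying only on the ExecutionStatus enum shown in the diff above:

import org.apache.dolphinscheduler.common.enums.ExecutionStatus;

public class DelayExecutionSketch {
    public static void main(String[] args) {
        // code 12 is resolved through EXECUTION_STATUS_MAP, just like the other states
        ExecutionStatus delayed = ExecutionStatus.of(12);
        System.out.println(delayed);                  // DELAY_EXECUTION
        System.out.println(delayed.getCode());        // 12
        System.out.println(delayed.typeIsRunning());  // true, per the updated typeIsRunning()
        System.out.println(delayed.typeIsFinished()); // false: a delayed task is still in flight
    }
}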

10
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/enums/TaskStateType.java

@ -31,12 +31,13 @@ public enum TaskStateType {
/**
* convert task state to execution status integer array
*
* @param taskStateType task state type
* @return result of execution status
*/
public static int[] convert2ExecutStatusIntArray(TaskStateType taskStateType){
public static int[] convert2ExecutStatusIntArray(TaskStateType taskStateType) {
switch (taskStateType){
switch (taskStateType) {
case SUCCESS:
return new int[]{ExecutionStatus.SUCCESS.ordinal()};
case FAILED:
@ -51,14 +52,15 @@ public enum TaskStateType {
case RUNNING:
return new int[]{ExecutionStatus.SUBMITTED_SUCCESS.ordinal(),
ExecutionStatus.RUNNING_EXECUTION.ordinal(),
ExecutionStatus.DELAY_EXECUTION.ordinal(),
ExecutionStatus.READY_PAUSE.ordinal(),
ExecutionStatus.READY_STOP.ordinal()};
case WAITTING:
return new int[]{
ExecutionStatus.SUBMITTED_SUCCESS.ordinal()
};
default:
break;
default:
break;
}
return new int[0];
}
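A short sketch of the updated RUNNING mapping, assuming only the two enums shown in this change set:

import java.util.Arrays;

import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.TaskStateType;

public class RunningStatesSketch {
    public static void main(String[] args) {
        // RUNNING now expands to five status ordinals, including the new DELAY_EXECUTION state
        int[] running = TaskStateType.convert2ExecutStatusIntArray(TaskStateType.RUNNING);
        boolean containsDelay = Arrays.stream(running)
                .anyMatch(code -> code == ExecutionStatus.DELAY_EXECUTION.ordinal());
        System.out.println(running.length);   // 5
        System.out.println(containsDelay);    // true
    }
}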

50
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/model/TaskNode.java

@ -136,6 +136,11 @@ public class TaskNode {
@JsonSerialize(using = JSONUtils.JsonDataSerializer.class)
private String timeout;
/**
* delay execution time.
*/
private int delayTime;
public String getId() {
return id;
}
@ -310,24 +315,25 @@ public class TaskNode {
@Override
public String toString() {
return "TaskNode{" +
"id='" + id + '\'' +
", name='" + name + '\'' +
", desc='" + desc + '\'' +
", type='" + type + '\'' +
", runFlag='" + runFlag + '\'' +
", loc='" + loc + '\'' +
", maxRetryTimes=" + maxRetryTimes +
", retryInterval=" + retryInterval +
", params='" + params + '\'' +
", preTasks='" + preTasks + '\'' +
", extras='" + extras + '\'' +
", depList=" + depList +
", dependence='" + dependence + '\'' +
", taskInstancePriority=" + taskInstancePriority +
", timeout='" + timeout + '\'' +
", workerGroup='" + workerGroup + '\'' +
'}';
return "TaskNode{"
+ "id='" + id + '\''
+ ", name='" + name + '\''
+ ", desc='" + desc + '\''
+ ", type='" + type + '\''
+ ", runFlag='" + runFlag + '\''
+ ", loc='" + loc + '\''
+ ", maxRetryTimes=" + maxRetryTimes
+ ", retryInterval=" + retryInterval
+ ", params='" + params + '\''
+ ", preTasks='" + preTasks + '\''
+ ", extras='" + extras + '\''
+ ", depList=" + depList
+ ", dependence='" + dependence + '\''
+ ", taskInstancePriority=" + taskInstancePriority
+ ", timeout='" + timeout + '\''
+ ", workerGroup='" + workerGroup + '\''
+ ", delayTime=" + delayTime
+ '}';
}
public String getWorkerGroup() {
@ -353,4 +359,12 @@ public class TaskNode {
public void setWorkerGroupId(Integer workerGroupId) {
this.workerGroupId = workerGroupId;
}
public int getDelayTime() {
return delayTime;
}
public void setDelayTime(int delayTime) {
this.delayTime = delayTime;
}
}
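A minimal sketch of the new delayTime field surviving a JSON round trip, assuming the TaskNode and JSONUtils classes referenced elsewhere in this change set; the sample JSON below is illustrative only:

import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.utils.JSONUtils;

public class TaskNodeDelaySketch {
    public static void main(String[] args) {
        // delayTime rides along with the rest of the task definition
        String json = "{\"id\":\"tasks-1\",\"name\":\"shell-1\",\"type\":\"SHELL\",\"delayTime\":5}";
        TaskNode node = JSONUtils.parseObject(json, TaskNode.class);
        System.out.println(node.getDelayTime());   // 5
        System.out.println(node);                  // toString now prints ", delayTime=5" as well
    }
}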

444
dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/DateUtils.java

File diff suppressed because one or more lines are too long

32
dolphinscheduler-common/src/test/java/org/apache/dolphinscheduler/common/enums/ExecutionStatusTest.java

@ -0,0 +1,32 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.enums;
import junit.framework.TestCase;
/**
* execution status test.
*/
public class ExecutionStatusTest extends TestCase {
public void testTypeIsRunning() {
assertTrue(ExecutionStatus.RUNNING_EXECUTION.typeIsRunning());
assertTrue(ExecutionStatus.WAITTING_DEPEND.typeIsRunning());
assertTrue(ExecutionStatus.DELAY_EXECUTION.typeIsRunning());
}
}

81
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/AlertDao.java

@ -14,15 +14,14 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.dao;
package org.apache.dolphinscheduler.dao;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.apache.dolphinscheduler.common.enums.AlertStatus;
import org.apache.dolphinscheduler.common.enums.AlertType;
import org.apache.dolphinscheduler.common.enums.ShowType;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.datasource.ConnectionFactory;
import org.apache.dolphinscheduler.dao.entity.Alert;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
@ -30,13 +29,17 @@ import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.AlertMapper;
import org.apache.dolphinscheduler.dao.mapper.UserAlertGroupMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component
public class AlertDao extends AbstractBaseDao {
@ -56,21 +59,23 @@ public class AlertDao extends AbstractBaseDao {
/**
* insert alert
*
* @param alert alert
* @return add alert result
*/
public int addAlert(Alert alert){
public int addAlert(Alert alert) {
return alertMapper.insert(alert);
}
/**
* update alert
*
* @param alertStatus alertStatus
* @param log log
* @param id id
* @return update alert result
*/
public int updateAlert(AlertStatus alertStatus,String log,int id){
public int updateAlert(AlertStatus alertStatus, String log, int id) {
Alert alert = alertMapper.selectById(id);
alert.setAlertStatus(alertStatus);
alert.setUpdateTime(new Date());
@ -80,46 +85,61 @@ public class AlertDao extends AbstractBaseDao {
/**
* query user list by alert group id
*
* @param alerGroupId alerGroupId
* @return user list
*/
public List<User> queryUserByAlertGroupId(int alerGroupId){
public List<User> queryUserByAlertGroupId(int alerGroupId) {
return userAlertGroupMapper.listUserByAlertgroupId(alerGroupId);
}
/**
* MasterServer or WorkerServer stopped
*
* @param alertgroupId alertgroupId
* @param host host
* @param serverType serverType
*/
public void sendServerStopedAlert(int alertgroupId,String host,String serverType){
public void sendServerStopedAlert(int alertgroupId, String host, String serverType) {
Alert alert = new Alert();
String content = String.format("[{'type':'%s','host':'%s','event':'server down','warning level':'serious'}]",
serverType, host);
List<LinkedHashMap> serverStopList = new ArrayList<>(1);
LinkedHashMap<String, String> serverStopedMap = new LinkedHashMap();
serverStopedMap.put("type", serverType);
serverStopedMap.put("host", host);
serverStopedMap.put("event", "server down");
serverStopedMap.put("warning level", "serious");
serverStopList.add(serverStopedMap);
String content = JSONUtils.toJsonString(serverStopList);
alert.setTitle("Fault tolerance warning");
saveTaskTimeoutAlert(alert, content, alertgroupId, null, null);
}
/**
* process time out alert
*
* @param processInstance processInstance
* @param processDefinition processDefinition
*/
public void sendProcessTimeoutAlert(ProcessInstance processInstance, ProcessDefinition processDefinition){
public void sendProcessTimeoutAlert(ProcessInstance processInstance, ProcessDefinition processDefinition) {
int alertgroupId = processInstance.getWarningGroupId();
String receivers = processDefinition.getReceivers();
String receiversCc = processDefinition.getReceiversCc();
Alert alert = new Alert();
String content = String.format("[{'id':'%d','name':'%s','event':'timeout','warnLevel':'middle'}]",
processInstance.getId(), processInstance.getName());
List<LinkedHashMap> processTimeoutList = new ArrayList<>(1);
LinkedHashMap<String, String> processTimeoutMap = new LinkedHashMap();
processTimeoutMap.put("id", String.valueOf(processInstance.getId()));
processTimeoutMap.put("name", processInstance.getName());
processTimeoutMap.put("event", "timeout");
processTimeoutMap.put("warnLevel", "middle");
processTimeoutList.add(processTimeoutMap);
String content = JSONUtils.toJsonString(processTimeoutList);
alert.setTitle("Process Timeout Warn");
saveTaskTimeoutAlert(alert, content, alertgroupId, receivers, receiversCc);
}
private void saveTaskTimeoutAlert(Alert alert, String content, int alertgroupId,
String receivers, String receiversCc){
private void saveTaskTimeoutAlert(Alert alert, String content, int alertgroupId,
String receivers, String receiversCc) {
alert.setShowType(ShowType.TABLE);
alert.setContent(content);
alert.setAlertType(AlertType.EMAIL);
@ -135,9 +155,9 @@ public class AlertDao extends AbstractBaseDao {
alertMapper.insert(alert);
}
/**
* task timeout warn
*
* @param alertgroupId alertgroupId
* @param receivers receivers
* @param receiversCc receiversCc
@ -146,34 +166,45 @@ public class AlertDao extends AbstractBaseDao {
* @param taskId taskId
* @param taskName taskName
*/
public void sendTaskTimeoutAlert(int alertgroupId,String receivers,String receiversCc, int processInstanceId,
String processInstanceName, int taskId,String taskName){
public void sendTaskTimeoutAlert(int alertgroupId, String receivers, String receiversCc, int processInstanceId,
String processInstanceName, int taskId, String taskName) {
Alert alert = new Alert();
String content = String.format("[{'process instance id':'%d','task name':'%s','task id':'%d','task name':'%s'," +
"'event':'timeout','warnLevel':'middle'}]", processInstanceId, processInstanceName, taskId, taskName);
List<LinkedHashMap> taskTimeoutList = new ArrayList<>(1);
LinkedHashMap<String, String> taskTimeoutMap = new LinkedHashMap();
taskTimeoutMap.put("process instance id", String.valueOf(processInstanceId));
taskTimeoutMap.put("process name", processInstanceName);
taskTimeoutMap.put("task id", String.valueOf(taskId));
taskTimeoutMap.put("task name", taskName);
taskTimeoutMap.put("event", "timeout");
taskTimeoutMap.put("warnLevel", "middle");
taskTimeoutList.add(taskTimeoutMap);
String content = JSONUtils.toJsonString(taskTimeoutList);
alert.setTitle("Task Timeout Warn");
saveTaskTimeoutAlert(alert, content, alertgroupId, receivers, receiversCc);
}
/**
* list the alerts that are waiting to be executed
*
* @return alert list
*/
public List<Alert> listWaitExecutionAlert(){
public List<Alert> listWaitExecutionAlert() {
return alertMapper.listAlertByStatus(AlertStatus.WAIT_EXECUTION);
}
/**
* list user information by alert group id
*
* @param alertgroupId alertgroupId
* @return user list
*/
public List<User> listUserByAlertgroupId(int alertgroupId){
public List<User> listUserByAlertgroupId(int alertgroupId) {
return userAlertGroupMapper.listUserByAlertgroupId(alertgroupId);
}
/**
* for test
*
* @return AlertMapper
*/
public AlertMapper getAlertMapper() {
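For reference, a runnable sketch of the alert content the reworked sendServerStopedAlert now stores, built the same way as in the method above; the host and server type values here are examples, not values from the diff:

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;

import org.apache.dolphinscheduler.common.utils.JSONUtils;

public class ServerStopAlertContentSketch {
    public static void main(String[] args) {
        List<LinkedHashMap> serverStopList = new ArrayList<>(1);
        LinkedHashMap<String, String> serverStopedMap = new LinkedHashMap<>();
        serverStopedMap.put("type", "WORKER_SERVER");
        serverStopedMap.put("host", "192.168.1.10");
        serverStopedMap.put("event", "server down");
        serverStopedMap.put("warning level", "serious");
        serverStopList.add(serverStopedMap);
        // proper JSON now, instead of the single-quoted string the old String.format produced:
        // [{"type":"WORKER_SERVER","host":"192.168.1.10","event":"server down","warning level":"serious"}]
        System.out.println(JSONUtils.toJsonString(serverStopList));
    }
}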

156
dolphinscheduler-dao/src/main/java/org/apache/dolphinscheduler/dao/entity/TaskInstance.java

@ -16,24 +16,23 @@
*/
package org.apache.dolphinscheduler.dao.entity;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.fasterxml.jackson.annotation.JsonFormat;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.utils.*;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import java.io.Serializable;
import java.util.Date;
import java.util.Map;
import com.baomidou.mybatisplus.annotation.IdType;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableId;
import com.baomidou.mybatisplus.annotation.TableName;
import com.fasterxml.jackson.annotation.JsonFormat;
/**
* task instance
*/
@ -43,7 +42,7 @@ public class TaskInstance implements Serializable {
/**
* id
*/
@TableId(value="id", type=IdType.AUTO)
@TableId(value = "id", type = IdType.AUTO)
private int id;
/**
@ -52,7 +51,6 @@ public class TaskInstance implements Serializable {
private String name;
/**
* task type
*/
@ -84,22 +82,28 @@ public class TaskInstance implements Serializable {
*/
private ExecutionStatus state;
/**
* task first submit time.
*/
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date firstSubmitTime;
/**
* task submit time
*/
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss",timezone="GMT+8")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date submitTime;
/**
* task start time
*/
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss",timezone="GMT+8")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date startTime;
/**
* task end time
*/
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss",timezone="GMT+8")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date endTime;
/**
@ -215,11 +219,14 @@ public class TaskInstance implements Serializable {
@TableField(exist = false)
private Map<String,String> resources;
private Map<String, String> resources;
/**
* delay execution time.
*/
private int delayTime;
public void init(String host,Date startTime,String executePath){
public void init(String host, Date startTime, String executePath) {
this.host = host;
this.startTime = startTime;
this.executePath = executePath;
@ -298,6 +305,14 @@ public class TaskInstance implements Serializable {
this.state = state;
}
public Date getFirstSubmitTime() {
return firstSubmitTime;
}
public void setFirstSubmitTime(Date firstSubmitTime) {
this.firstSubmitTime = firstSubmitTime;
}
public Date getSubmitTime() {
return submitTime;
}
@ -362,7 +377,7 @@ public class TaskInstance implements Serializable {
this.retryTimes = retryTimes;
}
public Boolean isTaskSuccess(){
public Boolean isTaskSuccess() {
return this.state == ExecutionStatus.SUCCESS;
}
@ -382,16 +397,16 @@ public class TaskInstance implements Serializable {
this.appLink = appLink;
}
public String getDependency(){
if(this.dependency != null){
public String getDependency() {
if (this.dependency != null) {
return this.dependency;
}
TaskNode taskNode = JSONUtils.parseObject(taskJson, TaskNode.class);
return taskNode == null ? null : taskNode.getDependence();
}
return taskNode.getDependence();
public void setDependency(String dependency) {
this.dependency = dependency;
}
public Flag getFlag() {
@ -401,6 +416,7 @@ public class TaskInstance implements Serializable {
public void setFlag(Flag flag) {
this.flag = flag;
}
public String getProcessInstanceName() {
return processInstanceName;
}
@ -465,40 +481,36 @@ public class TaskInstance implements Serializable {
this.resources = resources;
}
public boolean isSubProcess(){
public boolean isSubProcess() {
return TaskType.SUB_PROCESS.equals(TaskType.valueOf(this.taskType));
}
public boolean isDependTask(){
public boolean isDependTask() {
return TaskType.DEPENDENT.equals(TaskType.valueOf(this.taskType));
}
public boolean isConditionsTask(){
public boolean isConditionsTask() {
return TaskType.CONDITIONS.equals(TaskType.valueOf(this.taskType));
}
/**
* determine if you can try again
*
* @return can try result
*/
public boolean taskCanRetry() {
if(this.isSubProcess()){
if (this.isSubProcess()) {
return false;
}
if(this.getState() == ExecutionStatus.NEED_FAULT_TOLERANCE){
if (this.getState() == ExecutionStatus.NEED_FAULT_TOLERANCE) {
return true;
}else {
} else {
return (this.getState().typeIsFailure()
&& this.getRetryTimes() < this.getMaxRetryTimes());
&& this.getRetryTimes() < this.getMaxRetryTimes());
}
}
public void setDependency(String dependency) {
this.dependency = dependency;
}
public Priority getTaskInstancePriority() {
return taskInstancePriority;
}
@ -531,40 +543,50 @@ public class TaskInstance implements Serializable {
this.dependentResult = dependentResult;
}
public int getDelayTime() {
return delayTime;
}
public void setDelayTime(int delayTime) {
this.delayTime = delayTime;
}
@Override
public String toString() {
return "TaskInstance{" +
"id=" + id +
", name='" + name + '\'' +
", taskType='" + taskType + '\'' +
", processDefinitionId=" + processDefinitionId +
", processInstanceId=" + processInstanceId +
", processInstanceName='" + processInstanceName + '\'' +
", taskJson='" + taskJson + '\'' +
", state=" + state +
", submitTime=" + submitTime +
", startTime=" + startTime +
", endTime=" + endTime +
", host='" + host + '\'' +
", executePath='" + executePath + '\'' +
", logPath='" + logPath + '\'' +
", retryTimes=" + retryTimes +
", alertFlag=" + alertFlag +
", processInstance=" + processInstance +
", processDefine=" + processDefine +
", pid=" + pid +
", appLink='" + appLink + '\'' +
", flag=" + flag +
", dependency='" + dependency + '\'' +
", duration=" + duration +
", maxRetryTimes=" + maxRetryTimes +
", retryInterval=" + retryInterval +
", taskInstancePriority=" + taskInstancePriority +
", processInstancePriority=" + processInstancePriority +
", dependentResult='" + dependentResult + '\'' +
", workerGroup='" + workerGroup + '\'' +
", executorId=" + executorId +
", executorName='" + executorName + '\'' +
'}';
return "TaskInstance{"
+ "id=" + id
+ ", name='" + name + '\''
+ ", taskType='" + taskType + '\''
+ ", processDefinitionId=" + processDefinitionId
+ ", processInstanceId=" + processInstanceId
+ ", processInstanceName='" + processInstanceName + '\''
+ ", taskJson='" + taskJson + '\''
+ ", state=" + state
+ ", firstSubmitTime=" + firstSubmitTime
+ ", submitTime=" + submitTime
+ ", startTime=" + startTime
+ ", endTime=" + endTime
+ ", host='" + host + '\''
+ ", executePath='" + executePath + '\''
+ ", logPath='" + logPath + '\''
+ ", retryTimes=" + retryTimes
+ ", alertFlag=" + alertFlag
+ ", processInstance=" + processInstance
+ ", processDefine=" + processDefine
+ ", pid=" + pid
+ ", appLink='" + appLink + '\''
+ ", flag=" + flag
+ ", dependency='" + dependency + '\''
+ ", duration=" + duration
+ ", maxRetryTimes=" + maxRetryTimes
+ ", retryInterval=" + retryInterval
+ ", taskInstancePriority=" + taskInstancePriority
+ ", processInstancePriority=" + processInstancePriority
+ ", dependentResult='" + dependentResult + '\''
+ ", workerGroup='" + workerGroup + '\''
+ ", executorId=" + executorId
+ ", executorName='" + executorName + '\''
+ ", delayTime=" + delayTime
+ '}';
}
}
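A small sketch of the new TaskInstance fields, using only members visible in this diff (the no-arg constructor is the one exercised in TaskInstanceTest below):

import java.util.Date;

import org.apache.dolphinscheduler.dao.entity.TaskInstance;

public class TaskInstanceDelaySketch {
    public static void main(String[] args) {
        TaskInstance taskInstance = new TaskInstance();
        taskInstance.setFirstSubmitTime(new Date());   // recorded when the task is first submitted
        taskInstance.setDelayTime(5);                  // the new delay execution setting
        System.out.println(taskInstance.getDelayTime());                         // 5
        System.out.println(taskInstance.toString().contains("delayTime=5"));     // true, toString now includes it
    }
}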

32
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/entity/TaskInstanceTest.java

@ -16,6 +16,9 @@
*/
package org.apache.dolphinscheduler.dao.entity;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.junit.Assert;
import org.junit.Test;
@ -43,7 +46,36 @@ public class TaskInstanceTest {
//sub process
taskInstance.setTaskType("DEPENDENT");
Assert.assertTrue(taskInstance.isDependTask());
}
/**
* test for TaskInstance.getDependence
*/
@Test
public void testTaskInstanceGetDependence() {
TaskInstance taskInstance;
TaskNode taskNode;
taskInstance = new TaskInstance();
taskInstance.setTaskJson(null);
Assert.assertNull(taskInstance.getDependency());
taskInstance = new TaskInstance();
taskNode = new TaskNode();
taskNode.setDependence(null);
taskInstance.setTaskJson(JSONUtils.toJsonString(taskNode));
Assert.assertNull(taskInstance.getDependency());
taskInstance = new TaskInstance();
taskNode = new TaskNode();
// expect a JSON string here; it will be unwrapped by toJsonString
taskNode.setDependence("\"A\"");
taskInstance.setTaskJson(JSONUtils.toJsonString(taskNode));
Assert.assertEquals("A", taskInstance.getDependency());
taskInstance = new TaskInstance();
taskInstance.setTaskJson(null);
taskInstance.setDependency("{}");
Assert.assertEquals("{}", taskInstance.getDependency());
}
}

75
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/AccessTokenMapperTest.java

@ -34,6 +34,7 @@ import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Resource;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.ThreadLocalRandom;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.greaterThan;
@ -45,7 +46,7 @@ import static org.junit.Assert.*;
@RunWith(SpringRunner.class)
@SpringBootTest
@Transactional
@Rollback(true)
@Rollback
public class AccessTokenMapperTest {
@Autowired
@ -56,10 +57,11 @@ public class AccessTokenMapperTest {
/**
* test insert
*
* @throws Exception
*/
@Test
public void testInsert() throws Exception{
public void testInsert() throws Exception {
Integer userId = 1;
AccessToken accessToken = createAccessToken(userId);
@ -69,10 +71,11 @@ public class AccessTokenMapperTest {
/**
* test select by id
*
* @throws Exception
*/
@Test
public void testSelectById() throws Exception{
public void testSelectById() throws Exception {
Integer userId = 1;
AccessToken accessToken = createAccessToken(userId);
AccessToken resultAccessToken = accessTokenMapper.selectById(accessToken.getId());
@ -81,6 +84,7 @@ public class AccessTokenMapperTest {
/**
* test hashCode method
*
* @throws Exception
*/
@Test
@ -94,6 +98,7 @@ public class AccessTokenMapperTest {
/**
* test equals method
*
* @throws Exception
*/
@Test
@ -108,7 +113,7 @@ public class AccessTokenMapperTest {
* test page
*/
@Test
public void testSelectAccessTokenPage() throws Exception{
public void testSelectAccessTokenPage() throws Exception {
Integer count = 4;
String userName = "zhangsan";
@ -120,11 +125,11 @@ public class AccessTokenMapperTest {
Page page = new Page(offset, size);
IPage<AccessToken> accessTokenPage = accessTokenMapper.selectAccessTokenPage(page, userName, 0);
assertEquals(Integer.valueOf(accessTokenPage.getRecords().size()),size);
assertEquals(Integer.valueOf(accessTokenPage.getRecords().size()), size);
for (AccessToken accessToken : accessTokenPage.getRecords()){
for (AccessToken accessToken : accessTokenPage.getRecords()) {
AccessToken resultAccessToken = accessTokenMap.get(accessToken.getId());
assertEquals(accessToken,resultAccessToken);
assertEquals(accessToken, resultAccessToken);
}
}
@ -133,14 +138,17 @@ public class AccessTokenMapperTest {
* test update
*/
@Test
public void testUpdate() throws Exception{
public void testUpdate() throws Exception {
Integer userId = 1;
AccessToken accessToken = createAccessToken(userId);
//update
accessToken.setToken("56789");
accessToken.setExpireTime(DateUtils.getCurrentDate());
accessToken.setUpdateTime(DateUtils.getCurrentDate());
accessTokenMapper.updateById(accessToken);
int status = accessTokenMapper.updateById(accessToken);
if (status != 1) {
Assert.fail("update access token fail");
}
AccessToken resultAccessToken = accessTokenMapper.selectById(accessToken.getId());
assertEquals(accessToken, resultAccessToken);
}
@ -149,11 +157,14 @@ public class AccessTokenMapperTest {
* test delete
*/
@Test
public void testDelete() throws Exception{
public void testDelete() throws Exception {
Integer userId = 1;
AccessToken accessToken = createAccessToken(userId);
accessTokenMapper.deleteById(accessToken.getId());
int status = accessTokenMapper.deleteById(accessToken.getId());
if (status != 1) {
Assert.fail("delete access token data fail");
}
AccessToken resultAccessToken =
accessTokenMapper.selectById(accessToken.getId());
@ -163,21 +174,22 @@ public class AccessTokenMapperTest {
/**
* create accessTokens
* @param count create accessToken count
*
* @param count create accessToken count
* @param userName username
* @return accessToken map
* @throws Exception
*/
private Map<Integer,AccessToken> createAccessTokens(
Integer count,String userName) throws Exception{
private Map<Integer, AccessToken> createAccessTokens(
Integer count, String userName) throws Exception {
User user = createUser(userName);
Map<Integer,AccessToken> accessTokenMap = new HashMap<>();
for (int i = 1 ; i<= count ; i++){
AccessToken accessToken = createAccessToken(user.getId(),userName);
Map<Integer, AccessToken> accessTokenMap = new HashMap<>();
for (int i = 1; i <= count; i++) {
AccessToken accessToken = createAccessToken(user.getId(), userName);
accessTokenMap.put(accessToken.getId(),accessToken);
accessTokenMap.put(accessToken.getId(), accessToken);
}
return accessTokenMap;
@ -185,11 +197,12 @@ public class AccessTokenMapperTest {
/**
* create user
*
* @param userName userName
* @return user
* @throws Exception
*/
private User createUser(String userName) throws Exception{
private User createUser(String userName) throws Exception {
User user = new User();
user.setUserName(userName);
user.setUserPassword("123");
@ -201,42 +214,50 @@ public class AccessTokenMapperTest {
user.setUpdateTime(DateUtils.getCurrentDate());
user.setQueue("default");
userMapper.insert(user);
int status = userMapper.insert(user);
if (status != 1) {
Assert.fail("insert user data error");
}
return user;
}
/**
* create access token
* @param userId userId
*
* @param userId userId
* @param userName userName
* @return accessToken
* @throws Exception
*/
private AccessToken createAccessToken(Integer userId,String userName)throws Exception{
Random random = new Random();
private AccessToken createAccessToken(Integer userId, String userName) throws Exception {
//insertOne
AccessToken accessToken = new AccessToken();
accessToken.setUserName(userName);
accessToken.setUserId(userId);
accessToken.setToken(String.valueOf(random.nextLong()));
accessToken.setToken(String.valueOf(ThreadLocalRandom.current().nextLong()));
accessToken.setCreateTime(DateUtils.getCurrentDate());
accessToken.setUpdateTime(DateUtils.getCurrentDate());
accessToken.setExpireTime(DateUtils.getCurrentDate());
accessTokenMapper.insert(accessToken);
int status = accessTokenMapper.insert(accessToken);
if (status != 1) {
Assert.fail("insert data error");
}
return accessToken;
}
/**
* create access token
*
* @param userId userId
* @return accessToken
* @throws Exception
*/
private AccessToken createAccessToken(Integer userId)throws Exception{
return createAccessToken(userId,null);
private AccessToken createAccessToken(Integer userId) throws Exception {
return createAccessToken(userId, null);
}
}

85
dolphinscheduler-dao/src/test/java/org/apache/dolphinscheduler/dao/mapper/ResourceMapperTest.java

@ -65,9 +65,10 @@ public class ResourceMapperTest {
/**
* insert
*
* @return Resource
*/
private Resource insertOne(){
private Resource insertOne() {
//insertOne
Resource resource = new Resource();
resource.setAlias("ut-resource");
@ -76,16 +77,20 @@ public class ResourceMapperTest {
resource.setDirectory(false);
resource.setType(ResourceType.FILE);
resource.setUserId(111);
resourceMapper.insert(resource);
int status = resourceMapper.insert(resource);
if (status != 1) {
Assert.fail("insert data error");
}
return resource;
}
/**
* create resource by user
*
* @param user user
* @return Resource
*/
private Resource createResource(User user,boolean isDirectory,ResourceType resourceType,int pid,String alias,String fullName){
private Resource createResource(User user, boolean isDirectory, ResourceType resourceType, int pid, String alias, String fullName) {
//insertOne
Resource resource = new Resource();
resource.setDirectory(isDirectory);
@ -93,19 +98,23 @@ public class ResourceMapperTest {
resource.setAlias(alias);
resource.setFullName(fullName);
resource.setUserId(user.getId());
resourceMapper.insert(resource);
int status = resourceMapper.insert(resource);
if (status != 1) {
Assert.fail("insert data error");
}
return resource;
}
/**
* create resource by user
*
* @param user user
* @return Resource
*/
private Resource createResource(User user){
private Resource createResource(User user) {
//insertOne
String alias = String.format("ut-resource-%s",user.getUserName());
String fullName = String.format("/%s",alias);
String alias = String.format("ut-resource-%s", user.getUserName());
String fullName = String.format("/%s", alias);
Resource resource = createResource(user, false, ResourceType.FILE, -1, alias, fullName);
return resource;
@ -113,9 +122,10 @@ public class ResourceMapperTest {
/**
* create user
*
* @return User
*/
private User createGeneralUser(String userName){
private User createGeneralUser(String userName) {
User user = new User();
user.setUserName(userName);
user.setUserPassword("1");
@ -124,15 +134,20 @@ public class ResourceMapperTest {
user.setCreateTime(new Date());
user.setTenantId(1);
user.setUpdateTime(new Date());
userMapper.insert(user);
int status = userMapper.insert(user);
if (status != 1) {
Assert.fail("insert data error");
}
return user;
}
/**
* create resource user
*
* @return ResourcesUser
*/
private ResourcesUser createResourcesUser(Resource resource,User user){
private ResourcesUser createResourcesUser(Resource resource, User user) {
//insertOne
ResourcesUser resourcesUser = new ResourcesUser();
resourcesUser.setCreateTime(new Date());
@ -145,16 +160,17 @@ public class ResourceMapperTest {
}
@Test
public void testInsert(){
public void testInsert() {
Resource resource = insertOne();
assertNotNull(resource.getId());
assertThat(resource.getId(),greaterThan(0));
assertThat(resource.getId(), greaterThan(0));
}
/**
* test update
*/
@Test
public void testUpdate(){
public void testUpdate() {
//insertOne
Resource resource = insertOne();
resource.setCreateTime(new Date());
@ -167,7 +183,7 @@ public class ResourceMapperTest {
* test delete
*/
@Test
public void testDelete(){
public void testDelete() {
Resource resourceMap = insertOne();
int delete = resourceMapper.deleteById(resourceMap.getId());
Assert.assertEquals(1, delete);
@ -294,19 +310,31 @@ public class ResourceMapperTest {
Tenant tenant = new Tenant();
tenant.setTenantName("ut tenant ");
tenant.setTenantCode("ut tenant code for resource");
tenantMapper.insert(tenant);
int tenantInsertStatus = tenantMapper.insert(tenant);
if (tenantInsertStatus != 1) {
Assert.fail("insert tenant data error");
}
User user = new User();
user.setTenantId(tenant.getId());
user.setUserName("ut user");
userMapper.insert(user);
int userInsertStatus = userMapper.insert(user);
if (userInsertStatus != 1) {
Assert.fail("insert user data error");
}
Resource resource = insertOne();
resource.setUserId(user.getId());
resourceMapper.updateById(resource);
int userUpdateStatus = resourceMapper.updateById(resource);
if (userUpdateStatus != 1) {
Assert.fail("update user data error");
}
String resource1 = resourceMapper.queryTenantCodeByResourceName(
resource.getFullName(),ResourceType.FILE.ordinal()
resource.getFullName(), ResourceType.FILE.ordinal()
);
@ -315,7 +343,7 @@ public class ResourceMapperTest {
}
@Test
public void testListAuthorizedResource(){
public void testListAuthorizedResource() {
// create a general user
User generalUser1 = createGeneralUser("user1");
User generalUser2 = createGeneralUser("user2");
@ -328,20 +356,19 @@ public class ResourceMapperTest {
List<Resource> resources = resourceMapper.listAuthorizedResource(generalUser2.getId(), resNames);
Assert.assertEquals(generalUser2.getId(),resource.getUserId());
Assert.assertEquals(generalUser2.getId(), resource.getUserId());
Assert.assertFalse(resources.stream().map(t -> t.getFullName()).collect(toList()).containsAll(Arrays.asList(resNames)));
// authorize object unauthorizedResource to generalUser
createResourcesUser(unauthorizedResource,generalUser2);
createResourcesUser(unauthorizedResource, generalUser2);
List<Resource> authorizedResources = resourceMapper.listAuthorizedResource(generalUser2.getId(), resNames);
Assert.assertTrue(authorizedResources.stream().map(t -> t.getFullName()).collect(toList()).containsAll(Arrays.asList(resNames)));
}
@Test
public void deleteIdsTest(){
public void deleteIdsTest() {
// create a general user
User generalUser1 = createGeneralUser("user1");
@ -352,11 +379,11 @@ public class ResourceMapperTest {
resourceList.add(resource.getId());
resourceList.add(resource1.getId());
int result = resourceMapper.deleteIds(resourceList.toArray(new Integer[resourceList.size()]));
Assert.assertEquals(result,2);
Assert.assertEquals(result, 2);
}
@Test
public void queryResourceListAuthoredTest(){
public void queryResourceListAuthoredTest() {
// create a general user
User generalUser1 = createGeneralUser("user1");
User generalUser2 = createGeneralUser("user2");
@ -372,16 +399,18 @@ public class ResourceMapperTest {
}
@Test
public void batchUpdateResourceTest(){
public void batchUpdateResourceTest() {
// create a general user
User generalUser1 = createGeneralUser("user1");
// create resource
Resource resource = createResource(generalUser1);
resource.setFullName(String.format("%s-update",resource.getFullName()));
resource.setFullName(String.format("%s-update", resource.getFullName()));
resource.setUpdateTime(new Date());
List<Resource> resourceList = new ArrayList<>();
resourceList.add(resource);
int result = resourceMapper.batchUpdateResource(resourceList);
Assert.assertTrue(result>0);
if (result != resourceList.size()) {
Assert.fail("batch update resource data error");
}
}
}

1
dolphinscheduler-remote/src/main/java/org/apache/dolphinscheduler/remote/codec/NettyDecoder.java

@ -75,6 +75,7 @@ public class NettyDecoder extends ReplayingDecoder<NettyDecoder.State> {
out.add(packet);
//
checkpoint(State.MAGIC);
break;
default:
logger.warn("unknown decoder state {}", state());
}

2
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/builder/TaskExecutionContextBuilder.java

@ -40,6 +40,7 @@ public class TaskExecutionContextBuilder {
public TaskExecutionContextBuilder buildTaskInstanceRelatedInfo(TaskInstance taskInstance){
taskExecutionContext.setTaskInstanceId(taskInstance.getId());
taskExecutionContext.setTaskName(taskInstance.getName());
taskExecutionContext.setFirstSubmitTime(taskInstance.getFirstSubmitTime());
taskExecutionContext.setStartTime(taskInstance.getStartTime());
taskExecutionContext.setTaskType(taskInstance.getTaskType());
taskExecutionContext.setLogPath(taskInstance.getLogPath());
@ -48,6 +49,7 @@ public class TaskExecutionContextBuilder {
taskExecutionContext.setWorkerGroup(taskInstance.getWorkerGroup());
taskExecutionContext.setHost(taskInstance.getHost());
taskExecutionContext.setResources(taskInstance.getResources());
taskExecutionContext.setDelayTime(taskInstance.getDelayTime());
return this;
}
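A hedged sketch of how the two new fields flow from a TaskInstance into the transport context; get() and create() are assumed from the builder's surrounding usage and do not appear in this hunk:

import java.util.Date;

import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.server.builder.TaskExecutionContextBuilder;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;

public class BuilderDelaySketch {
    public static void main(String[] args) {
        TaskInstance taskInstance = new TaskInstance();
        taskInstance.setFirstSubmitTime(new Date());
        taskInstance.setDelayTime(5);
        TaskExecutionContext context = TaskExecutionContextBuilder.get()   // get() is an assumption, not shown above
                .buildTaskInstanceRelatedInfo(taskInstance)
                .create();                                                 // create() is an assumption, not shown above
        System.out.println(context.getDelayTime());        // 5
        System.out.println(context.getFirstSubmitTime());  // same first submit time as the instance
    }
}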

171
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/entity/TaskExecutionContext.java

@ -17,7 +17,7 @@
package org.apache.dolphinscheduler.server.entity;
import com.fasterxml.jackson.annotation.JsonFormat;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.TaskExecuteRequestCommand;
import org.apache.dolphinscheduler.remote.utils.JsonSerializer;
@ -26,30 +26,38 @@ import java.io.Serializable;
import java.util.Date;
import java.util.Map;
import com.fasterxml.jackson.annotation.JsonFormat;
/**
* master/worker task transport
* master/worker task transport
*/
public class TaskExecutionContext implements Serializable{
public class TaskExecutionContext implements Serializable {
/**
* task id
* task id
*/
private int taskInstanceId;
/**
* task name
* task name
*/
private String taskName;
/**
* task start time
* task first submit time.
*/
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss",timezone="GMT+8")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date firstSubmitTime;
/**
* task start time
*/
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date startTime;
/**
* task type
* task type
*/
private String taskType;
@ -57,9 +65,9 @@ public class TaskExecutionContext implements Serializable{
* host
*/
private String host;
/**
* task execute path
* task execute path
*/
private String executePath;
@ -69,7 +77,7 @@ public class TaskExecutionContext implements Serializable{
private String logPath;
/**
* task json
* task json
*/
private String taskJson;
@ -84,53 +92,53 @@ public class TaskExecutionContext implements Serializable{
private String appIds;
/**
* process instance id
* process instance id
*/
private int processInstanceId;
/**
* process instance schedule time
* process instance schedule time
*/
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss",timezone="GMT+8")
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
private Date scheduleTime;
/**
* process instance global parameters
* process instance global parameters
*/
private String globalParams;
/**
* execute user id
* execute user id
*/
private int executorId;
/**
* command type if complement
* command type if complement
*/
private int cmdTypeIfComplement;
/**
* tenant code
* tenant code
*/
private String tenantCode;
/**
* task queue
* task queue
*/
private String queue;
/**
* process define id
* process define id
*/
private int processDefineId;
/**
* project id
* project id
*/
private int projectId;
@ -140,12 +148,12 @@ public class TaskExecutionContext implements Serializable{
private String taskParams;
/**
* envFile
* envFile
*/
private String envFile;
/**
* definedParams
* definedParams
*/
private Map<String, String> definedParams;
@ -155,7 +163,7 @@ public class TaskExecutionContext implements Serializable{
private String taskAppId;
/**
* task timeout strategy
* task timeout strategy
*/
private int taskTimeoutStrategy;
@ -169,18 +177,28 @@ public class TaskExecutionContext implements Serializable{
*/
private String workerGroup;
/**
* delay execution time.
*/
private int delayTime;
/**
* current execution status
*/
private ExecutionStatus currentExecutionStatus;
/**
* resources full name and tenant code
*/
private Map<String,String> resources;
private Map<String, String> resources;
/**
* sql TaskExecutionContext
* sql TaskExecutionContext
*/
private SQLTaskExecutionContext sqlTaskExecutionContext;
/**
* datax TaskExecutionContext
* datax TaskExecutionContext
*/
private DataxTaskExecutionContext dataxTaskExecutionContext;
@ -195,7 +213,7 @@ public class TaskExecutionContext implements Serializable{
private SqoopTaskExecutionContext sqoopTaskExecutionContext;
/**
* procedure TaskExecutionContext
* procedure TaskExecutionContext
*/
private ProcedureTaskExecutionContext procedureTaskExecutionContext;
@ -215,6 +233,14 @@ public class TaskExecutionContext implements Serializable{
this.taskName = taskName;
}
public Date getFirstSubmitTime() {
return firstSubmitTime;
}
public void setFirstSubmitTime(Date firstSubmitTime) {
this.firstSubmitTime = firstSubmitTime;
}
public Date getStartTime() {
return startTime;
}
@ -407,6 +433,22 @@ public class TaskExecutionContext implements Serializable{
this.workerGroup = workerGroup;
}
public int getDelayTime() {
return delayTime;
}
public void setDelayTime(int delayTime) {
this.delayTime = delayTime;
}
public ExecutionStatus getCurrentExecutionStatus() {
return currentExecutionStatus;
}
public void setCurrentExecutionStatus(ExecutionStatus currentExecutionStatus) {
this.currentExecutionStatus = currentExecutionStatus;
}
public SQLTaskExecutionContext getSqlTaskExecutionContext() {
return sqlTaskExecutionContext;
}
@ -431,7 +473,7 @@ public class TaskExecutionContext implements Serializable{
this.procedureTaskExecutionContext = procedureTaskExecutionContext;
}
public Command toCommand(){
public Command toCommand() {
TaskExecuteRequestCommand requestCommand = new TaskExecuteRequestCommand();
requestCommand.setTaskExecutionContext(JsonSerializer.serializeToString(this));
return requestCommand.convert2Command();
@ -463,39 +505,42 @@ public class TaskExecutionContext implements Serializable{
@Override
public String toString() {
return "TaskExecutionContext{" +
"taskInstanceId=" + taskInstanceId +
", taskName='" + taskName + '\'' +
", startTime=" + startTime +
", taskType='" + taskType + '\'' +
", host='" + host + '\'' +
", executePath='" + executePath + '\'' +
", logPath='" + logPath + '\'' +
", taskJson='" + taskJson + '\'' +
", processId=" + processId +
", appIds='" + appIds + '\'' +
", processInstanceId=" + processInstanceId +
", scheduleTime=" + scheduleTime +
", globalParams='" + globalParams + '\'' +
", executorId=" + executorId +
", cmdTypeIfComplement=" + cmdTypeIfComplement +
", tenantCode='" + tenantCode + '\'' +
", queue='" + queue + '\'' +
", processDefineId=" + processDefineId +
", projectId=" + projectId +
", taskParams='" + taskParams + '\'' +
", envFile='" + envFile + '\'' +
", definedParams=" + definedParams +
", taskAppId='" + taskAppId + '\'' +
", taskTimeoutStrategy=" + taskTimeoutStrategy +
", taskTimeout=" + taskTimeout +
", workerGroup='" + workerGroup + '\'' +
", resources=" + resources +
", sqlTaskExecutionContext=" + sqlTaskExecutionContext +
", dataxTaskExecutionContext=" + dataxTaskExecutionContext +
", dependenceTaskExecutionContext=" + dependenceTaskExecutionContext +
", sqoopTaskExecutionContext=" + sqoopTaskExecutionContext +
", procedureTaskExecutionContext=" + procedureTaskExecutionContext +
'}';
return "TaskExecutionContext{"
+ "taskInstanceId=" + taskInstanceId
+ ", taskName='" + taskName + '\''
+ ", currentExecutionStatus=" + currentExecutionStatus
+ ", firstSubmitTime=" + firstSubmitTime
+ ", startTime=" + startTime
+ ", taskType='" + taskType + '\''
+ ", host='" + host + '\''
+ ", executePath='" + executePath + '\''
+ ", logPath='" + logPath + '\''
+ ", taskJson='" + taskJson + '\''
+ ", processId=" + processId
+ ", appIds='" + appIds + '\''
+ ", processInstanceId=" + processInstanceId
+ ", scheduleTime=" + scheduleTime
+ ", globalParams='" + globalParams + '\''
+ ", executorId=" + executorId
+ ", cmdTypeIfComplement=" + cmdTypeIfComplement
+ ", tenantCode='" + tenantCode + '\''
+ ", queue='" + queue + '\''
+ ", processDefineId=" + processDefineId
+ ", projectId=" + projectId
+ ", taskParams='" + taskParams + '\''
+ ", envFile='" + envFile + '\''
+ ", definedParams=" + definedParams
+ ", taskAppId='" + taskAppId + '\''
+ ", taskTimeoutStrategy=" + taskTimeoutStrategy
+ ", taskTimeout=" + taskTimeout
+ ", workerGroup='" + workerGroup + '\''
+ ", delayTime=" + delayTime
+ ", resources=" + resources
+ ", sqlTaskExecutionContext=" + sqlTaskExecutionContext
+ ", dataxTaskExecutionContext=" + dataxTaskExecutionContext
+ ", dependenceTaskExecutionContext=" + dependenceTaskExecutionContext
+ ", sqoopTaskExecutionContext=" + sqoopTaskExecutionContext
+ ", procedureTaskExecutionContext=" + procedureTaskExecutionContext
+ '}';
}
}
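The new firstSubmitTime, delayTime and currentExecutionStatus fields are what carry the delayed-execution state between master and worker. A minimal sketch of the check they enable, assuming delayTime is expressed in minutes as the getters above suggest (the class and values below are illustrative, not part of the project):

import java.util.Date;

public class DelayWindowSketch {

    /** Seconds left in the delay window: the configured delay minus the time already elapsed. */
    static long remainSeconds(Date firstSubmitTime, int delayMinutes) {
        long elapsedSeconds = (System.currentTimeMillis() - firstSubmitTime.getTime()) / 1000;
        return delayMinutes * 60L - elapsedSeconds;
    }

    public static void main(String[] args) {
        Date firstSubmitTime = new Date();
        int delayMinutes = 2; // illustrative value for TaskExecutionContext#delayTime
        if (remainSeconds(firstSubmitTime, delayMinutes) > 0) {
            System.out.println("still inside the delay window -> report DELAY_EXECUTION");
        } else {
            System.out.println("delay window has passed -> report RUNNING_EXECUTION");
        }
    }
}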

21
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/future/TaskFuture.java

@ -28,6 +28,7 @@ import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
/**
* task future
@ -55,11 +56,11 @@ public class TaskFuture {
/**
* response command
*/
private volatile Command responseCommand;
private AtomicReference<Command> responseCommandReference = new AtomicReference<>();
private volatile boolean sendOk = true;
private volatile Throwable cause;
private AtomicReference<Throwable> causeReference;
public TaskFuture(long opaque, long timeoutMillis) {
this.opaque = opaque;
@ -74,7 +75,7 @@ public class TaskFuture {
*/
public Command waitResponse() throws InterruptedException {
this.latch.await(timeoutMillis, TimeUnit.MILLISECONDS);
return this.responseCommand;
return this.responseCommandReference.get();
}
/**
@ -83,7 +84,7 @@ public class TaskFuture {
* @param responseCommand responseCommand
*/
public void putResponse(final Command responseCommand) {
this.responseCommand = responseCommand;
responseCommandReference.set(responseCommand);
this.latch.countDown();
FUTURE_TABLE.remove(opaque);
}
@ -114,11 +115,11 @@ public class TaskFuture {
}
public void setCause(Throwable cause) {
this.cause = cause;
causeReference.set(cause);
}
public Throwable getCause() {
return cause;
return causeReference.get();
}
public long getOpaque() {
@ -134,11 +135,11 @@ public class TaskFuture {
}
public Command getResponseCommand() {
return responseCommand;
return responseCommandReference.get();
}
public void setResponseCommand(Command responseCommand) {
this.responseCommand = responseCommand;
responseCommandReference.set(responseCommand);
}
@ -166,9 +167,9 @@ public class TaskFuture {
", timeoutMillis=" + timeoutMillis +
", latch=" + latch +
", beginTimestamp=" + beginTimestamp +
", responseCommand=" + responseCommand +
", responseCommand=" + responseCommandReference.get() +
", sendOk=" + sendOk +
", cause=" + cause +
", cause=" + causeReference.get() +
'}';
}
}
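The change above swaps the volatile responseCommand/cause fields for AtomicReference holders while keeping the CountDownLatch for blocking waits. A self-contained sketch of that pattern (an illustrative class, not the project's TaskFuture; note the reference is initialised eagerly so set() never hits a null holder):

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

public class FutureHolderSketch<T> {

    private final CountDownLatch latch = new CountDownLatch(1);
    // AtomicReference gives the same visibility as a volatile field and also
    // supports atomic compare-and-set updates should they be needed later.
    private final AtomicReference<T> result = new AtomicReference<>();

    public void complete(T value) {
        result.set(value);
        latch.countDown(); // release any thread blocked in await()
    }

    public T await(long timeoutMillis) throws InterruptedException {
        latch.await(timeoutMillis, TimeUnit.MILLISECONDS);
        return result.get(); // null if the timeout elapsed before complete() was called
    }
}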

1
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseService.java

@ -108,6 +108,7 @@ public class TaskResponseService {
TaskResponseEvent taskResponseEvent = eventQueue.take();
persist(taskResponseEvent);
} catch (InterruptedException e){
Thread.currentThread().interrupt();
break;
} catch (Exception e){
logger.error("persist task error",e);

1
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/ConditionsTaskExecThread.java

@ -60,6 +60,7 @@ public class ConditionsTaskExecThread extends MasterBaseTaskExecThread {
*/
public ConditionsTaskExecThread(TaskInstance taskInstance) {
super(taskInstance);
taskInstance.setStartTime(new Date());
}
@Override

1
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/DependentTaskExecThread.java

@ -64,6 +64,7 @@ public class DependentTaskExecThread extends MasterBaseTaskExecThread {
*/
public DependentTaskExecThread(TaskInstance taskInstance) {
super(taskInstance);
taskInstance.setStartTime(new Date());
}

21
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterBaseTaskExecThread.java

@ -16,11 +16,11 @@
*/
package org.apache.dolphinscheduler.server.master.runner;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.sift.SiftingAppender;
import static org.apache.dolphinscheduler.common.Constants.UNDERLINE;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.AlertDao;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
@ -30,11 +30,14 @@ import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.queue.TaskPriorityQueue;
import org.apache.dolphinscheduler.service.queue.TaskPriorityQueueImpl;
import java.util.concurrent.Callable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.dolphinscheduler.common.Constants.*;
import java.util.concurrent.Callable;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.sift.SiftingAppender;
/**
@ -171,9 +174,10 @@ public class MasterBaseTaskExecThread implements Callable<Boolean> {
logger.info(String.format("submit task , but task [%s] state [%s] is already finished. ", taskInstance.getName(), taskInstance.getState().toString()));
return true;
}
// task cannot submit when running
if(taskInstance.getState() == ExecutionStatus.RUNNING_EXECUTION){
logger.info(String.format("submit to task, but task [%s] state already be running. ", taskInstance.getName()));
// task cannot be submitted because its execution state is RUNNING or DELAY.
if (taskInstance.getState() == ExecutionStatus.RUNNING_EXECUTION
|| taskInstance.getState() == ExecutionStatus.DELAY_EXECUTION) {
logger.info("submit task, but the status of the task {} is already running or delayed.", taskInstance.getName());
return true;
}
logger.info("task ready to submit: {}", taskInstance);
@ -272,5 +276,4 @@ public class MasterBaseTaskExecThread implements Callable<Boolean> {
return logPath;
}
}

54
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterExecThread.java

@ -16,10 +16,21 @@
*/
package org.apache.dolphinscheduler.server.master.runner;
import com.google.common.collect.Lists;
import org.apache.commons.io.FileUtils;
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE;
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE;
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_RECOVERY_START_NODE_STRING;
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_START_NODE_NAMES;
import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP;
import static org.apache.dolphinscheduler.common.Constants.SEC_2_MINUTES_TIME_UNIT;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.*;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.DependResult;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.Priority;
import org.apache.dolphinscheduler.common.enums.TaskDependType;
import org.apache.dolphinscheduler.common.graph.DAG;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.model.TaskNodeRelation;
@ -27,7 +38,13 @@ import org.apache.dolphinscheduler.common.process.ProcessDag;
import org.apache.dolphinscheduler.common.task.conditions.ConditionsParameters;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.thread.ThreadUtils;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.OSUtils;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
@ -37,17 +54,26 @@ import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.dolphinscheduler.server.utils.AlertManager;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.apache.dolphinscheduler.service.quartz.cron.CronUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.io.FileUtils;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import static org.apache.dolphinscheduler.common.Constants.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.collect.Lists;
/**
* master exec thread, split dag
@ -470,9 +496,6 @@ public class MasterExecThread implements Runnable {
// task instance whether alert
taskInstance.setAlertFlag(Flag.NO);
// task instance start time
taskInstance.setStartTime(new Date());
// task instance flag
taskInstance.setFlag(Flag.YES);
@ -501,6 +524,8 @@ public class MasterExecThread implements Runnable {
taskInstance.setWorkerGroup(taskWorkerGroup);
}
// delay execution time
taskInstance.setDelayTime(taskNode.getDelayTime());
}
return taskInstance;
}
@ -719,9 +744,10 @@ public class MasterExecThread implements Runnable {
* @return ExecutionStatus
*/
private ExecutionStatus runningState(ExecutionStatus state){
if(state == ExecutionStatus.READY_STOP ||
state == ExecutionStatus.READY_PAUSE ||
state == ExecutionStatus.WAITTING_THREAD){
if (state == ExecutionStatus.READY_STOP
|| state == ExecutionStatus.READY_PAUSE
|| state == ExecutionStatus.WAITTING_THREAD
|| state == ExecutionStatus.DELAY_EXECUTION) {
// if the running task is not completed, the state remains unchanged
return state;
}else{

17
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/master/runner/MasterTaskExecThread.java

@ -24,6 +24,8 @@ import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.task.TaskTimeoutParameter;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
@ -39,7 +41,6 @@ import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import java.util.Date;
import java.util.Set;
import org.apache.dolphinscheduler.common.utils.*;
/**
@ -150,7 +151,7 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread {
break;
}
if(checkTimeout){
long remainTime = getRemaintime(taskTimeoutParameter.getInterval() * 60L);
long remainTime = DateUtils.getRemainTime(taskInstance.getStartTime(), taskTimeoutParameter.getInterval() * 60L);
if (remainTime < 0) {
logger.warn("task id: {} execution time out",taskInstance.getId());
// process define
@ -256,16 +257,4 @@ public class MasterTaskExecThread extends MasterBaseTaskExecThread {
TaskNode taskNode = JSONUtils.parseObject(taskJson, TaskNode.class);
return taskNode.getTaskTimeoutParameter();
}
/**
* get remain time (s)
*
* @return remain time
*/
private long getRemaintime(long timeoutSeconds) {
Date startTime = taskInstance.getStartTime();
long usedTime = (System.currentTimeMillis() - startTime.getTime()) / 1000;
return timeoutSeconds - usedTime;
}
}
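The private getRemaintime helper above is dropped in favour of DateUtils.getRemainTime. Judging from the removed code and the call sites in this diff, the shared utility presumably has this shape (a sketch under that assumption, not a copy of DateUtils):

import java.util.Date;

public final class RemainTimeSketch {

    /** Seconds of the given budget still left when measured from startTime; negative once exceeded. */
    public static long getRemainTime(Date startTime, long totalSeconds) {
        if (startTime == null) {          // assumed guard; the real utility may behave differently
            return totalSeconds;
        }
        long usedSeconds = (System.currentTimeMillis() - startTime.getTime()) / 1000;
        return totalSeconds - usedSeconds;
    }

    public static void main(String[] args) {
        Date tenSecondsAgo = new Date(System.currentTimeMillis() - 10_000L);
        System.out.println(getRemainTime(tenSecondsAgo, 60)); // roughly 50
    }
}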

4
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/registry/ZookeeperNodeManager.java

@ -151,10 +151,10 @@ public class ZookeeperNodeManager implements InitializingBean {
private String parseGroup(String path){
String[] parts = path.split("\\/");
if(parts.length != 6){
if (parts.length < 6) {
throw new IllegalArgumentException(String.format("worker group path : %s is not valid, ignore", path));
}
String group = parts[4];
String group = parts[parts.length - 2];
return group;
}
}
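Relaxing the length check and reading parts[parts.length - 2] makes the group the second-to-last path segment rather than a hard-coded index, so longer registry roots still parse. A standalone illustration with a hypothetical worker path layout:

public class ParseGroupSketch {

    // Hypothetical layout: /<root>/nodes/worker/<group>/<host:port>
    static String parseGroup(String path) {
        String[] parts = path.split("/");
        if (parts.length < 6) {
            throw new IllegalArgumentException("worker group path is not valid: " + path);
        }
        return parts[parts.length - 2]; // the group always sits just before host:port
    }

    public static void main(String[] args) {
        System.out.println(parseGroup("/dolphinscheduler/nodes/worker/default/192.168.0.1:1234")); // default
    }
}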

109
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/utils/AlertManager.java

@ -14,29 +14,30 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.utils;
import org.apache.dolphinscheduler.common.enums.AlertType;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.ShowType;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.AlertDao;
import org.apache.dolphinscheduler.dao.DaoFactory;
import org.apache.dolphinscheduler.dao.entity.Alert;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* alert manager
*/
@ -50,8 +51,7 @@ public class AlertManager {
/**
* alert dao
*/
private AlertDao alertDao = DaoFactory.getDaoInstance(AlertDao.class);
private final AlertDao alertDao = DaoFactory.getDaoInstance(AlertDao.class);
/**
* command type convert chinese
@ -86,50 +86,37 @@ public class AlertManager {
}
}
/**
* process instance format
*/
private static final String PROCESS_INSTANCE_FORMAT =
"\"id:%d\"," +
"\"name:%s\"," +
"\"job type: %s\"," +
"\"state: %s\"," +
"\"recovery:%s\"," +
"\"run time: %d\"," +
"\"start time: %s\"," +
"\"end time: %s\"," +
"\"host: %s\"" ;
/**
* get process instance content
* @param processInstance process instance
* @param taskInstances task instance list
*
* @param processInstance process instance
* @param taskInstances task instance list
* @return process instance format content
*/
public String getContentProcessInstance(ProcessInstance processInstance,
List<TaskInstance> taskInstances){
List<TaskInstance> taskInstances) {
String res = "";
if(processInstance.getState().typeIsSuccess()){
res = String.format(PROCESS_INSTANCE_FORMAT,
processInstance.getId(),
processInstance.getName(),
getCommandCnName(processInstance.getCommandType()),
processInstance.getState().toString(),
processInstance.getRecovery().toString(),
processInstance.getRunTimes(),
DateUtils.dateToString(processInstance.getStartTime()),
DateUtils.dateToString(processInstance.getEndTime()),
processInstance.getHost()
);
res = "[" + res + "]";
}else if(processInstance.getState().typeIsFailure()){
if (processInstance.getState().typeIsSuccess()) {
List<LinkedHashMap> successTaskList = new ArrayList<>(1);
LinkedHashMap<String, String> successTaskMap = new LinkedHashMap();
successTaskMap.put("id", String.valueOf(processInstance.getId()));
successTaskMap.put("name", processInstance.getName());
successTaskMap.put("job type", getCommandCnName(processInstance.getCommandType()));
successTaskMap.put("state", processInstance.getState().toString());
successTaskMap.put("recovery", processInstance.getRecovery().toString());
successTaskMap.put("run time", String.valueOf(processInstance.getRunTimes()));
successTaskMap.put("start time", DateUtils.dateToString(processInstance.getStartTime()));
successTaskMap.put("end time", DateUtils.dateToString(processInstance.getEndTime()));
successTaskMap.put("host", processInstance.getHost());
successTaskList.add(successTaskMap);
res = JSONUtils.toJsonString(successTaskList);
} else if (processInstance.getState().typeIsFailure()) {
List<LinkedHashMap> failedTaskList = new ArrayList<>();
for(TaskInstance task : taskInstances){
if(task.getState().typeIsSuccess()){
for (TaskInstance task : taskInstances) {
if (task.getState().typeIsSuccess()) {
continue;
}
LinkedHashMap<String, String> failedTaskMap = new LinkedHashMap();
@ -154,15 +141,15 @@ public class AlertManager {
/**
* getting worker fault tolerant content
*
* @param processInstance process instance
* @param processInstance process instance
* @param toleranceTaskList tolerance task list
* @return worker tolerance content
*/
private String getWorkerToleranceContent(ProcessInstance processInstance, List<TaskInstance> toleranceTaskList){
private String getWorkerToleranceContent(ProcessInstance processInstance, List<TaskInstance> toleranceTaskList) {
List<LinkedHashMap<String, String>> toleranceTaskInstanceList = new ArrayList<>();
for(TaskInstance taskInstance: toleranceTaskList){
for (TaskInstance taskInstance : toleranceTaskList) {
LinkedHashMap<String, String> toleranceWorkerContentMap = new LinkedHashMap();
toleranceWorkerContentMap.put("process name", processInstance.getName());
toleranceWorkerContentMap.put("task name", taskInstance.getName());
@ -176,11 +163,11 @@ public class AlertManager {
/**
* send worker alert fault tolerance
*
* @param processInstance process instance
* @param processInstance process instance
* @param toleranceTaskList tolerance task list
*/
public void sendAlertWorkerToleranceFault(ProcessInstance processInstance, List<TaskInstance> toleranceTaskList){
try{
public void sendAlertWorkerToleranceFault(ProcessInstance processInstance, List<TaskInstance> toleranceTaskList) {
try {
Alert alert = new Alert();
alert.setTitle("worker fault tolerance");
alert.setShowType(ShowType.TABLE);
@ -188,13 +175,13 @@ public class AlertManager {
alert.setContent(content);
alert.setAlertType(AlertType.EMAIL);
alert.setCreateTime(new Date());
alert.setAlertGroupId(processInstance.getWarningGroupId() == null ? 1:processInstance.getWarningGroupId());
alert.setAlertGroupId(processInstance.getWarningGroupId() == null ? 1 : processInstance.getWarningGroupId());
alert.setReceivers(processInstance.getProcessDefinition().getReceivers());
alert.setReceiversCc(processInstance.getProcessDefinition().getReceiversCc());
alertDao.addAlert(alert);
logger.info("add alert to db , alert : {}", alert.toString());
}catch (Exception e){
} catch (Exception e) {
logger.error("send alert failed:{} ", e.getMessage());
}
@ -202,40 +189,40 @@ public class AlertManager {
/**
* send process instance alert
* @param processInstance process instance
* @param taskInstances task instance list
*
* @param processInstance process instance
* @param taskInstances task instance list
*/
public void sendAlertProcessInstance(ProcessInstance processInstance,
List<TaskInstance> taskInstances){
List<TaskInstance> taskInstances) {
boolean sendWarnning = false;
WarningType warningType = processInstance.getWarningType();
switch (warningType){
switch (warningType) {
case ALL:
if(processInstance.getState().typeIsFinished()){
if (processInstance.getState().typeIsFinished()) {
sendWarnning = true;
}
break;
case SUCCESS:
if(processInstance.getState().typeIsSuccess()){
if (processInstance.getState().typeIsSuccess()) {
sendWarnning = true;
}
break;
case FAILURE:
if(processInstance.getState().typeIsFailure()){
if (processInstance.getState().typeIsFailure()) {
sendWarnning = true;
}
break;
default:
}
if(!sendWarnning){
if (!sendWarnning) {
return;
}
Alert alert = new Alert();
String cmdName = getCommandCnName(processInstance.getCommandType());
String success = processInstance.getState().typeIsSuccess() ? "success" :"failed";
String success = processInstance.getState().typeIsSuccess() ? "success" : "failed";
alert.setTitle(cmdName + " " + success);
ShowType showType = processInstance.getState().typeIsSuccess() ? ShowType.TEXT : ShowType.TABLE;
alert.setShowType(showType);
@ -254,7 +241,7 @@ public class AlertManager {
/**
* send process timeout alert
*
* @param processInstance process instance
* @param processInstance process instance
* @param processDefinition process definition
*/
public void sendProcessTimeoutAlert(ProcessInstance processInstance, ProcessDefinition processDefinition) {
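The success branch above now assembles a LinkedHashMap per row and serialises the list, so the alert content is valid JSON with a stable field order instead of a hand-built string. A sketch of the shape this presumably produces, using Jackson directly where the project goes through its JSONUtils wrapper:

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;

import com.fasterxml.jackson.databind.ObjectMapper;

public class AlertContentSketch {

    public static void main(String[] args) throws Exception {
        List<LinkedHashMap<String, String>> content = new ArrayList<>(1);
        LinkedHashMap<String, String> row = new LinkedHashMap<>(); // insertion order is kept in the JSON
        row.put("id", "1000");
        row.put("name", "demo-process");   // illustrative values only
        row.put("state", "SUCCESS");
        content.add(row);
        // prints: [{"id":"1000","name":"demo-process","state":"SUCCESS"}]
        System.out.println(new ObjectMapper().writeValueAsString(content));
    }
}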

39
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/processor/TaskExecuteProcessor.java

@ -18,15 +18,17 @@
package org.apache.dolphinscheduler.server.worker.processor;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.sift.SiftingAppender;
import com.github.rholder.retry.RetryException;
import io.netty.channel.Channel;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.thread.ThreadUtils;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.FileUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.LoggerUtils;
import org.apache.dolphinscheduler.common.utils.NetUtils;
import org.apache.dolphinscheduler.common.utils.Preconditions;
import org.apache.dolphinscheduler.common.utils.RetryerUtils;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.CommandType;
import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand;
@ -38,14 +40,21 @@ import org.apache.dolphinscheduler.server.log.TaskLogDiscriminator;
import org.apache.dolphinscheduler.server.worker.config.WorkerConfig;
import org.apache.dolphinscheduler.server.worker.runner.TaskExecuteThread;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Date;
import java.util.Optional;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.github.rholder.retry.RetryException;
import ch.qos.logback.classic.LoggerContext;
import ch.qos.logback.classic.sift.SiftingAppender;
import io.netty.channel.Channel;
/**
* worker request processor
*/
@ -97,7 +106,7 @@ public class TaskExecuteProcessor implements NettyRequestProcessor {
return;
}
taskExecutionContext.setHost(NetUtils.getHost() + ":" + workerConfig.getListenPort());
// custom logger
Logger taskLogger = LoggerFactory.getLogger(LoggerUtils.buildTaskId(LoggerUtils.TASK_LOGGER_INFO_PREFIX,
@ -122,7 +131,15 @@ public class TaskExecuteProcessor implements NettyRequestProcessor {
taskCallbackService.addRemoteChannel(taskExecutionContext.getTaskInstanceId(),
new NettyRemoteChannel(channel, command.getOpaque()));
// tell master that task is in executing
if (DateUtils.getRemainTime(taskExecutionContext.getFirstSubmitTime(), taskExecutionContext.getDelayTime() * 60L) > 0) {
taskExecutionContext.setCurrentExecutionStatus(ExecutionStatus.DELAY_EXECUTION);
taskExecutionContext.setStartTime(null);
} else {
taskExecutionContext.setCurrentExecutionStatus(ExecutionStatus.RUNNING_EXECUTION);
taskExecutionContext.setStartTime(new Date());
}
// tell master the status of this task (RUNNING_EXECUTION or DELAY_EXECUTION)
final Command ackCommand = buildAckCommand(taskExecutionContext).convert2Command();
try {
@ -167,10 +184,10 @@ public class TaskExecuteProcessor implements NettyRequestProcessor {
private TaskExecuteAckCommand buildAckCommand(TaskExecutionContext taskExecutionContext) {
TaskExecuteAckCommand ackCommand = new TaskExecuteAckCommand();
ackCommand.setTaskInstanceId(taskExecutionContext.getTaskInstanceId());
ackCommand.setStatus(ExecutionStatus.RUNNING_EXECUTION.getCode());
ackCommand.setStatus(taskExecutionContext.getCurrentExecutionStatus().getCode());
ackCommand.setLogPath(getTaskLogPath(taskExecutionContext));
ackCommand.setHost(taskExecutionContext.getHost());
ackCommand.setStartTime(new Date());
ackCommand.setStartTime(taskExecutionContext.getStartTime());
if(taskExecutionContext.getTaskType().equals(TaskType.SQL.name()) || taskExecutionContext.getTaskType().equals(TaskType.PROCEDURE.name())){
ackCommand.setExecutePath(null);
}else{

98
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/runner/TaskExecuteThread.java

@ -16,25 +16,20 @@
*/
package org.apache.dolphinscheduler.server.worker.runner;
import java.io.File;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import java.util.Set;
import org.apache.commons.collections.MapUtils;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.task.TaskTimeoutParameter;
import org.apache.dolphinscheduler.common.thread.ThreadUtils;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.RetryerUtils;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand;
import org.apache.dolphinscheduler.remote.command.TaskExecuteResponseCommand;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.worker.cache.TaskExecutionContextCacheManager;
@ -43,9 +38,23 @@ import org.apache.dolphinscheduler.server.worker.processor.TaskCallbackService;
import org.apache.dolphinscheduler.server.worker.task.AbstractTask;
import org.apache.dolphinscheduler.server.worker.task.TaskManager;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.commons.collections.MapUtils;
import java.io.File;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.github.rholder.retry.RetryException;
/**
* task scheduler thread
@ -105,6 +114,15 @@ public class TaskExecuteThread implements Runnable {
// task node
TaskNode taskNode = JSONUtils.parseObject(taskExecutionContext.getTaskJson(), TaskNode.class);
delayExecutionIfNeeded();
if (taskExecutionContext.getStartTime() == null) {
taskExecutionContext.setStartTime(new Date());
}
if (taskExecutionContext.getCurrentExecutionStatus() != ExecutionStatus.RUNNING_EXECUTION) {
changeTaskExecutionStatusToRunning();
}
logger.info("the task begins to execute. task instance id: {}", taskExecutionContext.getTaskInstanceId());
// copy hdfs/minio file to local
downloadResource(taskExecutionContext.getExecutePath(),
taskExecutionContext.getResources(),
@ -138,7 +156,7 @@ public class TaskExecuteThread implements Runnable {
responseCommand.setProcessId(task.getProcessId());
responseCommand.setAppIds(task.getAppIds());
logger.info("task instance id : {},task final status : {}", taskExecutionContext.getTaskInstanceId(), task.getExitStatus());
}catch (Exception e){
} catch (Exception e) {
logger.error("task scheduler failure", e);
kill();
responseCommand.setStatus(ExecutionStatus.FAILURE.getCode());
@ -147,9 +165,10 @@ public class TaskExecuteThread implements Runnable {
responseCommand.setAppIds(task.getAppIds());
} finally {
try {
taskExecutionContext.setCurrentExecutionStatus(ExecutionStatus.of(responseCommand.getStatus()));
taskExecutionContextCacheManager.removeByTaskInstanceId(taskExecutionContext.getTaskInstanceId());
taskCallbackService.sendResult(taskExecutionContext.getTaskInstanceId(), responseCommand.convert2Command());
}catch (Exception e){
} catch (Exception e) {
ThreadUtils.sleep(Constants.SLEEP_TIME_MILLIS);
taskCallbackService.sendResult(taskExecutionContext.getTaskInstanceId(), responseCommand.convert2Command());
}
@ -256,4 +275,59 @@ public class TaskExecuteThread implements Runnable {
}
}
}
/**
* delay execution if needed.
*/
private void delayExecutionIfNeeded() {
long remainTime = DateUtils.getRemainTime(taskExecutionContext.getFirstSubmitTime(),
taskExecutionContext.getDelayTime() * 60L);
logger.info("delay execution time: {} s", remainTime < 0 ? 0 : remainTime);
if (remainTime > 0) {
try {
Thread.sleep(remainTime * Constants.SLEEP_TIME_MILLIS);
} catch (Exception e) {
logger.error("delay task execution failure, the task will be executed directly. process instance id:{}, task instance id:{}",
taskExecutionContext.getProcessInstanceId(),
taskExecutionContext.getTaskInstanceId());
}
}
}
/**
* send an ack to change the status of the task.
*/
private void changeTaskExecutionStatusToRunning() {
taskExecutionContext.setCurrentExecutionStatus(ExecutionStatus.RUNNING_EXECUTION);
Command ackCommand = buildAckCommand().convert2Command();
try {
RetryerUtils.retryCall(() -> {
taskCallbackService.sendAck(taskExecutionContext.getTaskInstanceId(), ackCommand);
return Boolean.TRUE;
});
} catch (ExecutionException | RetryException e) {
logger.error(e.getMessage(), e);
}
}
/**
* build ack command.
*
* @return TaskExecuteAckCommand
*/
private TaskExecuteAckCommand buildAckCommand() {
TaskExecuteAckCommand ackCommand = new TaskExecuteAckCommand();
ackCommand.setTaskInstanceId(taskExecutionContext.getTaskInstanceId());
ackCommand.setStatus(taskExecutionContext.getCurrentExecutionStatus().getCode());
ackCommand.setStartTime(taskExecutionContext.getStartTime());
ackCommand.setLogPath(taskExecutionContext.getLogPath());
ackCommand.setHost(taskExecutionContext.getHost());
if (taskExecutionContext.getTaskType().equals(TaskType.SQL.name())
|| taskExecutionContext.getTaskType().equals(TaskType.PROCEDURE.name())) {
ackCommand.setExecutePath(null);
} else {
ackCommand.setExecutePath(taskExecutionContext.getExecutePath());
}
return ackCommand;
}
}
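delayExecutionIfNeeded above turns the remaining seconds into a sleep via remainTime * Constants.SLEEP_TIME_MILLIS, which presumably equals 1000 ms. The worker-side sequence, reduced to a sketch (illustrative code; the real thread reports status through TaskCallbackService acks rather than println):

import java.util.Date;
import java.util.concurrent.TimeUnit;

public class DelayedRunSketch {

    public static void runWithDelay(Date firstSubmitTime, int delayMinutes, Runnable task) throws InterruptedException {
        long remainSeconds = delayMinutes * 60L
                - (System.currentTimeMillis() - firstSubmitTime.getTime()) / 1000;
        if (remainSeconds > 0) {
            System.out.println("ack DELAY_EXECUTION, sleeping " + remainSeconds + " s");
            TimeUnit.SECONDS.sleep(remainSeconds); // same as remainSeconds * 1000 ms
        }
        System.out.println("ack RUNNING_EXECUTION, startTime = " + new Date());
        task.run();
    }

    public static void main(String[] args) throws InterruptedException {
        runWithDelay(new Date(), 0, () -> System.out.println("task body runs here"));
    }
}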

22
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/AbstractCommandExecutor.java

@ -16,12 +16,14 @@
*/
package org.apache.dolphinscheduler.server.worker.task;
import com.sun.jna.platform.win32.Kernel32;
import com.sun.jna.platform.win32.WinNT;
import static org.apache.dolphinscheduler.common.Constants.EXIT_CODE_FAILURE;
import static org.apache.dolphinscheduler.common.Constants.EXIT_CODE_SUCCESS;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.thread.Stopper;
import org.apache.dolphinscheduler.common.thread.ThreadUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.common.utils.LoggerUtils;
import org.apache.dolphinscheduler.common.utils.OSUtils;
@ -32,9 +34,12 @@ import org.apache.dolphinscheduler.server.utils.ProcessUtils;
import org.apache.dolphinscheduler.server.worker.cache.TaskExecutionContextCacheManager;
import org.apache.dolphinscheduler.server.worker.cache.impl.TaskExecutionContextCacheManagerImpl;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.slf4j.Logger;
import java.io.*;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.reflect.Field;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
@ -47,8 +52,10 @@ import java.util.function.Consumer;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static org.apache.dolphinscheduler.common.Constants.EXIT_CODE_FAILURE;
import static org.apache.dolphinscheduler.common.Constants.EXIT_CODE_SUCCESS;
import org.slf4j.Logger;
import com.sun.jna.platform.win32.Kernel32;
import com.sun.jna.platform.win32.WinNT;
/**
* abstract command executor
@ -474,8 +481,7 @@ public abstract class AbstractCommandExecutor {
* @return remain time
*/
private long getRemaintime() {
long usedTime = (System.currentTimeMillis() - taskExecutionContext.getStartTime().getTime()) / 1000;
long remainTime = taskExecutionContext.getTaskTimeout() - usedTime;
long remainTime = DateUtils.getRemainTime(taskExecutionContext.getStartTime(), taskExecutionContext.getTaskTimeout());
if (remainTime < 0) {
throw new RuntimeException("task execution time out");

2
dolphinscheduler-server/src/main/java/org/apache/dolphinscheduler/server/worker/task/sql/SqlTask.java

@ -262,7 +262,7 @@ public class SqlTask extends AbstractTask {
while (rowCount < LIMIT && resultSet.next()) {
ObjectNode mapOfColValues = JSONUtils.createObjectNode();
for (int i = 1; i <= num; i++) {
mapOfColValues.set(md.getColumnName(i), JSONUtils.toJsonNode(resultSet.getObject(i)));
mapOfColValues.set(md.getColumnLabel(i), JSONUtils.toJsonNode(resultSet.getObject(i)));
}
resultJSONArray.add(mapOfColValues);
rowCount++;
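Switching from getColumnName to getColumnLabel makes the result keys honour a SQL alias (SELECT ... AS ...): per the JDBC spec the label is what the query asked for, while the name is the underlying column (what a driver returns for the name when an alias is present varies somewhat). A small standalone demo, using an in-memory H2 URL purely as a placeholder:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.Statement;

public class ColumnLabelSketch {

    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
             Statement stmt = conn.createStatement()) {
            stmt.execute("CREATE TABLE t (user_name VARCHAR(32))");
            stmt.execute("INSERT INTO t VALUES ('alice')");
            try (ResultSet rs = stmt.executeQuery("SELECT user_name AS name FROM t")) {
                ResultSetMetaData md = rs.getMetaData();
                System.out.println(md.getColumnLabel(1)); // the alias the query asked for: NAME
                System.out.println(md.getColumnName(1));  // usually the underlying column (driver-dependent)
            }
        }
    }
}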

171
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/ConditionsTaskTest.java

@ -16,14 +16,26 @@
*/
package org.apache.dolphinscheduler.server.master;
import org.apache.dolphinscheduler.common.enums.DependentRelation;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.TaskType;
import org.apache.dolphinscheduler.common.model.DependentItem;
import org.apache.dolphinscheduler.common.model.DependentTaskModel;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.task.conditions.ConditionsParameters;
import org.apache.dolphinscheduler.common.task.dependent.DependentParameters;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.dolphinscheduler.server.master.runner.ConditionsTaskExecThread;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@ -34,99 +46,144 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;
import java.util.ArrayList;
import java.util.List;
@RunWith(MockitoJUnitRunner.Silent.class)
public class ConditionsTaskTest {
private static final Logger logger = LoggerFactory.getLogger(DependentTaskTest.class);
private ProcessService processService;
private ApplicationContext applicationContext;
/**
* TaskNode.runFlag : task can be run normally
*/
public static final String FLOWNODE_RUN_FLAG_NORMAL = "NORMAL";
private ProcessService processService;
private MasterConfig config;
private ProcessInstance processInstance;
@Before
public void before() {
config = new MasterConfig();
ApplicationContext applicationContext = Mockito.mock(ApplicationContext.class);
SpringApplicationContext springApplicationContext = new SpringApplicationContext();
springApplicationContext.setApplicationContext(applicationContext);
MasterConfig config = new MasterConfig();
Mockito.when(applicationContext.getBean(MasterConfig.class)).thenReturn(config);
config.setMasterTaskCommitRetryTimes(3);
config.setMasterTaskCommitInterval(1000);
processService = Mockito.mock(ProcessService.class);
applicationContext = Mockito.mock(ApplicationContext.class);
SpringApplicationContext springApplicationContext = new SpringApplicationContext();
springApplicationContext.setApplicationContext(applicationContext);
Mockito.when(applicationContext.getBean(ProcessService.class)).thenReturn(processService);
Mockito.when(applicationContext.getBean(MasterConfig.class)).thenReturn(config);
processInstance = getProcessInstance();
Mockito.when(processService
.findTaskInstanceById(252612))
.thenReturn(getTaskInstance());
.findProcessInstanceById(processInstance.getId()))
.thenReturn(processInstance);
}
Mockito.when(processService.saveTaskInstance(getTaskInstance()))
.thenReturn(true);
private TaskInstance testBasicInit(ExecutionStatus expectResult) {
TaskInstance taskInstance = getTaskInstance(getTaskNode(), processInstance);
Mockito.when(processService.findProcessInstanceById(10112))
.thenReturn(getProcessInstance());
// for MasterBaseTaskExecThread.submit
Mockito.when(processService
.submitTask(taskInstance))
.thenReturn(taskInstance);
// for MasterBaseTaskExecThread.call
Mockito.when(processService
.findTaskInstanceById(taskInstance.getId()))
.thenReturn(taskInstance);
// for ConditionsTaskExecThread.initTaskParameters
Mockito.when(processService
.saveTaskInstance(taskInstance))
.thenReturn(true);
// for ConditionsTaskExecThread.updateTaskState
Mockito.when(processService
.updateTaskInstance(taskInstance))
.thenReturn(true);
// for ConditionsTaskExecThread.waitTaskQuit
List<TaskInstance> conditions = Stream.of(
getTaskInstanceForValidTaskList(1001, "1", expectResult)
).collect(Collectors.toList());
Mockito.when(processService
.findValidTaskListByProcessId(10112))
.thenReturn(getTaskInstances());
.findValidTaskListByProcessId(processInstance.getId()))
.thenReturn(conditions);
return taskInstance;
}
@Test
public void testCondition(){
TaskInstance taskInstance = getTaskInstance();
String dependString = "{\"dependTaskList\":[{\"dependItemList\":[{\"depTasks\":\"1\",\"status\":\"SUCCESS\"}],\"relation\":\"AND\"}],\"relation\":\"AND\"}";
String conditionResult = "{\"successNode\":[\"2\"],\"failedNode\":[\"3\"]}";
public void testBasicSuccess() throws Exception {
TaskInstance taskInstance = testBasicInit(ExecutionStatus.SUCCESS);
ConditionsTaskExecThread taskExecThread = new ConditionsTaskExecThread(taskInstance);
taskExecThread.call();
Assert.assertEquals(ExecutionStatus.SUCCESS, taskExecThread.getTaskInstance().getState());
}
taskInstance.setDependency(dependString);
Mockito.when(processService.submitTask(taskInstance))
.thenReturn(taskInstance);
ConditionsTaskExecThread conditions =
new ConditionsTaskExecThread(taskInstance);
@Test
public void testBasicFailure() throws Exception {
TaskInstance taskInstance = testBasicInit(ExecutionStatus.FAILURE);
ConditionsTaskExecThread taskExecThread = new ConditionsTaskExecThread(taskInstance);
taskExecThread.call();
Assert.assertEquals(ExecutionStatus.FAILURE, taskExecThread.getTaskInstance().getState());
}
try {
conditions.call();
} catch (Exception e) {
e.printStackTrace();
}
private TaskNode getTaskNode() {
TaskNode taskNode = new TaskNode();
taskNode.setId("tasks-1000");
taskNode.setName("C");
taskNode.setType(TaskType.CONDITIONS.toString());
taskNode.setRunFlag(FLOWNODE_RUN_FLAG_NORMAL);
Assert.assertEquals(ExecutionStatus.SUCCESS, conditions.getTaskInstance().getState());
}
DependentItem dependentItem = new DependentItem();
dependentItem.setDepTasks("1");
dependentItem.setStatus(ExecutionStatus.SUCCESS);
DependentTaskModel dependentTaskModel = new DependentTaskModel();
dependentTaskModel.setDependItemList(Stream.of(dependentItem).collect(Collectors.toList()));
dependentTaskModel.setRelation(DependentRelation.AND);
private TaskInstance getTaskInstance(){
TaskInstance taskInstance = new TaskInstance();
taskInstance.setId(252612);
taskInstance.setName("C");
taskInstance.setTaskType("CONDITIONS");
taskInstance.setProcessInstanceId(10112);
taskInstance.setProcessDefinitionId(100001);
return taskInstance;
}
DependentParameters dependentParameters = new DependentParameters();
dependentParameters.setDependTaskList(Stream.of(dependentTaskModel).collect(Collectors.toList()));
dependentParameters.setRelation(DependentRelation.AND);
// in: AND(AND(1 is SUCCESS))
taskNode.setDependence(JSONUtils.toJsonString(dependentParameters));
ConditionsParameters conditionsParameters = new ConditionsParameters();
conditionsParameters.setSuccessNode(Stream.of("2").collect(Collectors.toList()));
conditionsParameters.setFailedNode(Stream.of("3").collect(Collectors.toList()));
private List<TaskInstance> getTaskInstances(){
List<TaskInstance> list = new ArrayList<>();
TaskInstance taskInstance = new TaskInstance();
taskInstance.setId(199999);
taskInstance.setName("1");
taskInstance.setState(ExecutionStatus.SUCCESS);
list.add(taskInstance);
return list;
// out: SUCCESS => 2, FAILED => 3
taskNode.setConditionResult(JSONUtils.toJsonString(conditionsParameters));
return taskNode;
}
private ProcessInstance getProcessInstance(){
private ProcessInstance getProcessInstance() {
ProcessInstance processInstance = new ProcessInstance();
processInstance.setId(10112);
processInstance.setProcessDefinitionId(100001);
processInstance.setId(1000);
processInstance.setProcessDefinitionId(1000);
processInstance.setState(ExecutionStatus.RUNNING_EXECUTION);
return processInstance;
}
private TaskInstance getTaskInstance(TaskNode taskNode, ProcessInstance processInstance) {
TaskInstance taskInstance = new TaskInstance();
taskInstance.setId(1000);
taskInstance.setTaskJson(JSONUtils.toJsonString(taskNode));
taskInstance.setName(taskNode.getName());
taskInstance.setTaskType(taskNode.getType());
taskInstance.setProcessInstanceId(processInstance.getId());
taskInstance.setProcessDefinitionId(processInstance.getProcessDefinitionId());
return taskInstance;
}
private TaskInstance getTaskInstanceForValidTaskList(int id, String name, ExecutionStatus state) {
TaskInstance taskInstance = new TaskInstance();
taskInstance.setId(id);
taskInstance.setName(name);
taskInstance.setState(state);
return taskInstance;
}
}

3
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/consumer/TaskPriorityQueueConsumerTest.java

@ -55,7 +55,8 @@ import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes={DependencyConfig.class, SpringApplicationContext.class, SpringZKServer.class, CuratorZookeeperClient.class,
NettyExecutorManager.class, ExecutorDispatcher.class, ZookeeperRegistryCenter.class, TaskPriorityQueueConsumer.class,
ZookeeperNodeManager.class, ZookeeperCachedOperator.class, ZookeeperConfig.class, MasterConfig.class})
ZookeeperNodeManager.class, ZookeeperCachedOperator.class, ZookeeperConfig.class, MasterConfig.class,
CuratorZookeeperClient.class})
public class TaskPriorityQueueConsumerTest {

3
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/ExecutorDispatcherTest.java

@ -31,6 +31,7 @@ import org.apache.dolphinscheduler.server.worker.processor.TaskExecuteProcessor;
import org.apache.dolphinscheduler.server.worker.registry.WorkerRegistry;
import org.apache.dolphinscheduler.server.zk.SpringZKServer;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.zk.CuratorZookeeperClient;
import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator;
import org.apache.dolphinscheduler.service.zk.ZookeeperConfig;
import org.junit.Test;
@ -46,7 +47,7 @@ import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes={DependencyConfig.class, SpringApplicationContext.class, SpringZKServer.class, WorkerRegistry.class,
NettyExecutorManager.class, ExecutorDispatcher.class, ZookeeperRegistryCenter.class, WorkerConfig.class,
ZookeeperNodeManager.class, ZookeeperCachedOperator.class, ZookeeperConfig.class})
ZookeeperNodeManager.class, ZookeeperCachedOperator.class, ZookeeperConfig.class, CuratorZookeeperClient.class})
public class ExecutorDispatcherTest {
@Autowired

3
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/executor/NettyExecutorManagerTest.java

@ -37,6 +37,7 @@ import org.apache.dolphinscheduler.server.worker.processor.TaskExecuteProcessor;
import org.apache.dolphinscheduler.server.worker.registry.WorkerRegistry;
import org.apache.dolphinscheduler.server.zk.SpringZKServer;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import org.apache.dolphinscheduler.service.zk.CuratorZookeeperClient;
import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator;
import org.apache.dolphinscheduler.service.zk.ZookeeperConfig;
import org.junit.Assert;
@ -52,7 +53,7 @@ import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes={DependencyConfig.class, SpringZKServer.class, WorkerRegistry.class,
ZookeeperNodeManager.class, ZookeeperRegistryCenter.class, WorkerConfig.class,
ZookeeperNodeManager.class, ZookeeperRegistryCenter.class, WorkerConfig.class, CuratorZookeeperClient.class,
ZookeeperCachedOperator.class, ZookeeperConfig.class, SpringApplicationContext.class, NettyExecutorManager.class})
public class NettyExecutorManagerTest {

3
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/dispatch/host/RoundRobinHostManagerTest.java

@ -28,6 +28,7 @@ import org.apache.dolphinscheduler.server.utils.ExecutionContextTestUtils;
import org.apache.dolphinscheduler.server.worker.config.WorkerConfig;
import org.apache.dolphinscheduler.server.worker.registry.WorkerRegistry;
import org.apache.dolphinscheduler.server.zk.SpringZKServer;
import org.apache.dolphinscheduler.service.zk.CuratorZookeeperClient;
import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator;
import org.apache.dolphinscheduler.service.zk.ZookeeperConfig;
import org.junit.Assert;
@ -43,7 +44,7 @@ import org.springframework.test.context.junit4.SpringRunner;
*/
@RunWith(SpringRunner.class)
@ContextConfiguration(classes={DependencyConfig.class, SpringZKServer.class, WorkerRegistry.class, ZookeeperRegistryCenter.class, WorkerConfig.class,
ZookeeperNodeManager.class, ZookeeperCachedOperator.class, ZookeeperConfig.class})
ZookeeperNodeManager.class, ZookeeperCachedOperator.class, ZookeeperConfig.class, CuratorZookeeperClient.class})
public class RoundRobinHostManagerTest {

4
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/processor/queue/TaskResponseServiceTest.java

@ -22,6 +22,7 @@ import org.apache.dolphinscheduler.server.registry.DependencyConfig;
import org.apache.dolphinscheduler.server.registry.ZookeeperNodeManager;
import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter;
import org.apache.dolphinscheduler.server.zk.SpringZKServer;
import org.apache.dolphinscheduler.service.zk.CuratorZookeeperClient;
import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator;
import org.apache.dolphinscheduler.service.zk.ZookeeperConfig;
import org.junit.Assert;
@ -35,7 +36,8 @@ import java.util.Date;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes={DependencyConfig.class, SpringZKServer.class, TaskResponseService.class, ZookeeperRegistryCenter.class,
ZookeeperCachedOperator.class, ZookeeperConfig.class, ZookeeperNodeManager.class, TaskResponseService.class})
ZookeeperCachedOperator.class, ZookeeperConfig.class, ZookeeperNodeManager.class, TaskResponseService.class,
CuratorZookeeperClient.class})
public class TaskResponseServiceTest {
@Autowired

4
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/master/registry/MasterRegistryTest.java

@ -21,6 +21,7 @@ import org.apache.dolphinscheduler.remote.utils.Constants;
import org.apache.dolphinscheduler.server.master.config.MasterConfig;
import org.apache.dolphinscheduler.server.registry.ZookeeperRegistryCenter;
import org.apache.dolphinscheduler.server.zk.SpringZKServer;
import org.apache.dolphinscheduler.service.zk.CuratorZookeeperClient;
import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator;
import org.apache.dolphinscheduler.service.zk.ZookeeperConfig;
import org.junit.Assert;
@ -38,7 +39,8 @@ import static org.apache.dolphinscheduler.common.Constants.HEARTBEAT_FOR_ZOOKEEP
* master registry test
*/
@RunWith(SpringRunner.class)
@ContextConfiguration(classes={SpringZKServer.class, MasterRegistry.class,ZookeeperRegistryCenter.class, MasterConfig.class, ZookeeperCachedOperator.class, ZookeeperConfig.class})
@ContextConfiguration(classes = {SpringZKServer.class, MasterRegistry.class, ZookeeperRegistryCenter.class,
MasterConfig.class, ZookeeperCachedOperator.class, ZookeeperConfig.class, CuratorZookeeperClient.class})
public class MasterRegistryTest {
@Autowired

171
dolphinscheduler-server/src/test/java/org/apache/dolphinscheduler/server/worker/runner/TaskExecuteThreadTest.java

@ -0,0 +1,171 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.server.worker.runner;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.task.AbstractParameters;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.LoggerUtils;
import org.apache.dolphinscheduler.remote.command.Command;
import org.apache.dolphinscheduler.remote.command.TaskExecuteAckCommand;
import org.apache.dolphinscheduler.remote.command.TaskExecuteResponseCommand;
import org.apache.dolphinscheduler.server.entity.TaskExecutionContext;
import org.apache.dolphinscheduler.server.worker.cache.impl.TaskExecutionContextCacheManagerImpl;
import org.apache.dolphinscheduler.server.worker.processor.TaskCallbackService;
import org.apache.dolphinscheduler.server.worker.task.AbstractTask;
import org.apache.dolphinscheduler.server.worker.task.TaskManager;
import org.apache.dolphinscheduler.service.bean.SpringApplicationContext;
import java.util.Date;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* test task execute thread.
*/
@RunWith(PowerMockRunner.class)
@PrepareForTest({TaskManager.class, JSONUtils.class, CommonUtils.class, SpringApplicationContext.class})
public class TaskExecuteThreadTest {
private TaskExecutionContext taskExecutionContext;
private TaskCallbackService taskCallbackService;
private Command ackCommand;
private Command responseCommand;
private Logger taskLogger;
private TaskExecutionContextCacheManagerImpl taskExecutionContextCacheManager;
@Before
public void before() {
// init task execution context, logger
taskExecutionContext = new TaskExecutionContext();
taskExecutionContext.setProcessId(12345);
taskExecutionContext.setProcessDefineId(1);
taskExecutionContext.setProcessInstanceId(1);
taskExecutionContext.setTaskInstanceId(1);
taskExecutionContext.setTaskType("");
taskExecutionContext.setFirstSubmitTime(new Date());
taskExecutionContext.setDelayTime(0);
taskExecutionContext.setLogPath("/tmp/test.log");
taskExecutionContext.setHost("localhost");
taskExecutionContext.setExecutePath("/tmp/dolphinscheduler/exec/process/1/2/3/4");
ackCommand = new TaskExecuteAckCommand().convert2Command();
responseCommand = new TaskExecuteResponseCommand(taskExecutionContext.getTaskInstanceId()).convert2Command();
taskLogger = LoggerFactory.getLogger(LoggerUtils.buildTaskId(
LoggerUtils.TASK_LOGGER_INFO_PREFIX,
taskExecutionContext.getProcessDefineId(),
taskExecutionContext.getProcessInstanceId(),
taskExecutionContext.getTaskInstanceId()
));
taskExecutionContextCacheManager = new TaskExecutionContextCacheManagerImpl();
taskExecutionContextCacheManager.cacheTaskExecutionContext(taskExecutionContext);
taskCallbackService = PowerMockito.mock(TaskCallbackService.class);
PowerMockito.doNothing().when(taskCallbackService).sendAck(taskExecutionContext.getTaskInstanceId(), ackCommand);
PowerMockito.doNothing().when(taskCallbackService).sendResult(taskExecutionContext.getTaskInstanceId(), responseCommand);
PowerMockito.mockStatic(SpringApplicationContext.class);
PowerMockito.when(SpringApplicationContext.getBean(TaskExecutionContextCacheManagerImpl.class))
.thenReturn(taskExecutionContextCacheManager);
PowerMockito.mockStatic(TaskManager.class);
PowerMockito.when(TaskManager.newTask(taskExecutionContext, taskLogger))
.thenReturn(new SimpleTask(taskExecutionContext, taskLogger));
PowerMockito.mockStatic(JSONUtils.class);
PowerMockito.when(JSONUtils.parseObject(taskExecutionContext.getTaskJson(), TaskNode.class))
.thenReturn(new TaskNode());
PowerMockito.mockStatic(CommonUtils.class);
PowerMockito.when(CommonUtils.getSystemEnvPath()).thenReturn("/user_home/.bash_profile");
}
@Test
public void testNormalExecution() {
taskExecutionContext.setTaskType("SQL");
taskExecutionContext.setStartTime(new Date());
taskExecutionContext.setCurrentExecutionStatus(ExecutionStatus.RUNNING_EXECUTION);
TaskExecuteThread taskExecuteThread = new TaskExecuteThread(taskExecutionContext, taskCallbackService, taskLogger);
taskExecuteThread.run();
Assert.assertEquals(ExecutionStatus.SUCCESS, taskExecutionContext.getCurrentExecutionStatus());
}
@Test
public void testDelayExecution() {
taskExecutionContext.setTaskType("PYTHON");
taskExecutionContext.setStartTime(null);
taskExecutionContext.setDelayTime(1);
taskExecutionContext.setCurrentExecutionStatus(ExecutionStatus.DELAY_EXECUTION);
TaskExecuteThread taskExecuteThread = new TaskExecuteThread(taskExecutionContext, taskCallbackService, taskLogger);
taskExecuteThread.run();
Assert.assertEquals(ExecutionStatus.SUCCESS, taskExecutionContext.getCurrentExecutionStatus());
}
private class SimpleTask extends AbstractTask {
protected SimpleTask(TaskExecutionContext taskExecutionContext, Logger logger) {
super(taskExecutionContext, logger);
// pid
this.processId = taskExecutionContext.getProcessId();
}
@Override
public AbstractParameters getParameters() {
return null;
}
@Override
public void init() {
}
@Override
public void handle() throws Exception {
}
@Override
public void after() {
}
@Override
public ExecutionStatus getExitStatus() {
return ExecutionStatus.SUCCESS;
}
}
}
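The delay-execution test above relies on the new firstSubmitTime and delayTime (minutes) fields. As a rough sketch only, and not the project's actual implementation, a worker could compute how long a DELAY_EXECUTION task still has to wait by comparing the elapsed time since the first submit with the configured delay; the class and method names below are hypothetical:
import java.util.Date;
import java.util.concurrent.TimeUnit;
public class DelayExecutionSketch {
    // Remaining delay in milliseconds: the configured delay (in minutes, as entered in the
    // "Delay execution time" field) minus the time elapsed since the first submit.
    public static long remainingDelayMs(Date firstSubmitTime, int delayMinutes) {
        long elapsedMs = System.currentTimeMillis() - firstSubmitTime.getTime();
        long delayMs = TimeUnit.MINUTES.toMillis(delayMinutes);
        return Math.max(delayMs - elapsedMs, 0L);
    }
    public static void main(String[] args) {
        // pretend the task was first submitted 30 seconds ago with a 1-minute delay,
        // matching the delayTime used in testDelayExecution
        Date firstSubmitTime = new Date(System.currentTimeMillis() - 30_000L);
        System.out.println("remaining delay ms: " + remainingDelayMs(firstSubmitTime, 1));
    }
}
Once the remaining delay reaches zero, the task would move on to normal execution, which is consistent with the test asserting ExecutionStatus.SUCCESS after run().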

98
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/process/ProcessService.java

@ -16,31 +16,89 @@
*/
package org.apache.dolphinscheduler.service.process;
import com.cronutils.model.Cron;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.commons.lang.ArrayUtils;
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE;
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE;
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_EMPTY_SUB_PROCESS;
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_RECOVER_PROCESS_ID_STRING;
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_SUB_PROCESS;
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_SUB_PROCESS_DEFINE_ID;
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_SUB_PROCESS_PARENT_INSTANCE_ID;
import static org.apache.dolphinscheduler.common.Constants.YYYY_MM_DD_HH_MM_SS;
import static java.util.stream.Collectors.toSet;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.*;
import org.apache.dolphinscheduler.common.enums.AuthorizationType;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.CycleEnum;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.FailureStrategy;
import org.apache.dolphinscheduler.common.enums.Flag;
import org.apache.dolphinscheduler.common.enums.ResourceType;
import org.apache.dolphinscheduler.common.enums.TaskDependType;
import org.apache.dolphinscheduler.common.enums.WarningType;
import org.apache.dolphinscheduler.common.model.DateInterval;
import org.apache.dolphinscheduler.common.model.TaskNode;
import org.apache.dolphinscheduler.common.process.Property;
import org.apache.dolphinscheduler.common.task.subprocess.SubProcessParameters;
import org.apache.dolphinscheduler.common.utils.*;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.ParameterUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.Command;
import org.apache.dolphinscheduler.dao.entity.CycleDependency;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.ErrorCommand;
import org.apache.dolphinscheduler.dao.entity.ProcessData;
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition;
import org.apache.dolphinscheduler.dao.entity.ProcessInstance;
import org.apache.dolphinscheduler.dao.entity.ProcessInstanceMap;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.Schedule;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.Tenant;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.CommandMapper;
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
import org.apache.dolphinscheduler.dao.mapper.ErrorCommandMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.ResourceMapper;
import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.TenantMapper;
import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper;
import org.apache.dolphinscheduler.dao.mapper.UserMapper;
import org.apache.dolphinscheduler.remote.utils.Host;
import org.apache.dolphinscheduler.service.log.LogClientService;
import org.apache.dolphinscheduler.service.quartz.cron.CronUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import org.quartz.CronExpression;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import java.util.*;
import java.util.stream.Collectors;
import static java.util.stream.Collectors.toSet;
import static org.apache.dolphinscheduler.common.Constants.*;
import com.cronutils.model.Cron;
import com.fasterxml.jackson.databind.node.ObjectNode;
/**
 * process related dao operations, built on the mappers declared in this class.
@ -52,6 +110,7 @@ public class ProcessService {
private final int[] stateArray = new int[]{ExecutionStatus.SUBMITTED_SUCCESS.ordinal(),
ExecutionStatus.RUNNING_EXECUTION.ordinal(),
ExecutionStatus.DELAY_EXECUTION.ordinal(),
ExecutionStatus.READY_PAUSE.ordinal(),
ExecutionStatus.READY_STOP.ordinal()};
@ -1001,8 +1060,9 @@ public class ProcessService {
if(taskInstance.getState() != ExecutionStatus.NEED_FAULT_TOLERANCE){
taskInstance.setRetryTimes(taskInstance.getRetryTimes() + 1 );
}
taskInstance.setSubmitTime(null);
taskInstance.setStartTime(null);
taskInstance.setEndTime(null);
taskInstance.setStartTime(new Date());
taskInstance.setFlag(Flag.YES);
taskInstance.setHost(null);
taskInstance.setId(0);
@ -1012,7 +1072,12 @@ public class ProcessService {
taskInstance.setExecutorId(processInstance.getExecutorId());
taskInstance.setProcessInstancePriority(processInstance.getProcessInstancePriority());
taskInstance.setState(getSubmitTaskState(taskInstance, processInstanceState));
taskInstance.setSubmitTime(new Date());
if (taskInstance.getSubmitTime() == null) {
taskInstance.setSubmitTime(new Date());
}
if (taskInstance.getFirstSubmitTime() == null) {
taskInstance.setFirstSubmitTime(taskInstance.getSubmitTime());
}
boolean saveResult = saveTaskInstance(taskInstance);
if(!saveResult){
return null;
@ -1062,10 +1127,11 @@ public class ProcessService {
public ExecutionStatus getSubmitTaskState(TaskInstance taskInstance, ExecutionStatus processInstanceState){
ExecutionStatus state = taskInstance.getState();
if(
// running or killed
// running, delayed or killed
// the task already exists in task queue
// return state
state == ExecutionStatus.RUNNING_EXECUTION
|| state == ExecutionStatus.DELAY_EXECUTION
|| state == ExecutionStatus.KILL
|| checkTaskExistsInTaskQueue(taskInstance)
){
@ -1588,7 +1654,7 @@ public class ProcessService {
*/
public List<CycleDependency> getCycleDependencies(int masterId,int[] ids,Date scheduledFireTime) throws Exception {
List<CycleDependency> cycleDependencyList = new ArrayList<CycleDependency>();
if(ArrayUtils.isEmpty(ids)){
if (ids == null || ids.length == 0) {
logger.warn("ids[] is empty!is invalid!");
return cycleDependencyList;
}
@ -1772,7 +1838,7 @@ public class ProcessService {
public <T> List<T> listUnauthorized(int userId,T[] needChecks,AuthorizationType authorizationType){
List<T> resultList = new ArrayList<T>();
if (!ArrayUtils.isEmpty(needChecks)) {
if (Objects.nonNull(needChecks) && needChecks.length > 0) {
Set<T> originResSet = new HashSet<T>(Arrays.asList(needChecks));
switch (authorizationType){

23
dolphinscheduler-service/src/main/java/org/apache/dolphinscheduler/service/quartz/QuartzExecutors.java

@ -61,20 +61,20 @@ public class QuartzExecutors {
*/
private static Scheduler scheduler;
/**
* instance of QuartzExecutors
*/
private static volatile QuartzExecutors INSTANCE = null;
/**
* load conf
*/
private static Configuration conf;
private static final class Holder {
private static final QuartzExecutors instance = new QuartzExecutors();
}
private QuartzExecutors() {
try {
conf = new PropertiesConfiguration(QUARTZ_PROPERTIES_PATH);
init();
}catch (ConfigurationException e){
logger.warn("not loaded quartz configuration file, will used default value",e);
}
@ -85,18 +85,7 @@ public class QuartzExecutors {
* @return instance of Quartz Executors
*/
public static QuartzExecutors getInstance() {
if (INSTANCE == null) {
synchronized (QuartzExecutors.class) {
// when two or more threads pass the first null check at the same time, check again inside the lock so that only one instance is created.
if (INSTANCE == null) {
QuartzExecutors quartzExecutors = new QuartzExecutors();
//finish QuartzExecutors init
quartzExecutors.init();
INSTANCE = quartzExecutors;
}
}
}
return INSTANCE;
return Holder.instance;
}
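The hunk above drops the double-checked locking singleton in favor of the initialization-on-demand holder idiom: class initialization is lazy and guaranteed thread-safe by the JVM, so neither a volatile field nor explicit synchronization is needed. A minimal, generic sketch of the idiom (the class names here are illustrative, not taken from the patch):
public final class LazySingleton {
    private LazySingleton() {
        // expensive initialization would go here
    }
    // The nested holder class is not initialized until getInstance() first references it,
    // and the JVM guarantees class initialization runs at most once and is thread-safe,
    // so no volatile field or synchronized block is required.
    private static final class Holder {
        private static final LazySingleton INSTANCE = new LazySingleton();
    }
    public static LazySingleton getInstance() {
        return Holder.INSTANCE;
    }
}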

7
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/config.js

@ -229,6 +229,13 @@ const tasksState = {
color: '#5101be',
icoUnicode: 'ans-icon-dependence',
isSpin: false
},
DELAY_EXECUTION: {
id: 12,
desc: `${i18n.$t('Delay execution')}`,
color: '#5102ce',
icoUnicode: 'ans-icon-coin',
isSpin: false
}
}

21
dolphinscheduler-ui/src/js/conf/home/pages/dag/_source/formModel/formModel.vue

@ -109,6 +109,20 @@
<span>({{$t('Minute')}})</span>
</div>
</div>
<!-- Delay execution time -->
<div class="clearfix list" v-if="taskType !== 'SUB_PROCESS' && taskType !== 'CONDITIONS' && taskType !== 'DEPENDENT'">
<div class="text-box">
<span>{{$t('Delay execution time')}}</span>
</div>
<div class="cont-box">
<m-select-input v-model="delayTime" :list="[0,1,5,10]">
</m-select-input>
<span>({{$t('Minute')}})</span>
</div>
</div>
<!-- Branch flow -->
<div class="clearfix list" v-if="taskType === 'CONDITIONS'">
<div class="text-box">
<span>{{$t('State')}}</span>
@ -127,7 +141,6 @@
</x-select>
</div>
</div>
<div class="clearfix list" v-if="taskType === 'CONDITIONS'">
<div class="text-box">
<span>{{$t('State')}}</span>
@ -339,6 +352,8 @@
maxRetryTimes: '0',
// Failure retry interval
retryInterval: '1',
// Delay execution time
delayTime: '0',
// Task timeout alarm
timeout: {},
// Task priority
@ -466,6 +481,7 @@
dependence: this.cacheDependence,
maxRetryTimes: this.maxRetryTimes,
retryInterval: this.retryInterval,
delayTime: this.delayTime,
timeout: this.timeout,
taskInstancePriority: this.taskInstancePriority,
workerGroup: this.workerGroup,
@ -544,6 +560,7 @@
dependence: this.dependence,
maxRetryTimes: this.maxRetryTimes,
retryInterval: this.retryInterval,
delayTime: this.delayTime,
timeout: this.timeout,
taskInstancePriority: this.taskInstancePriority,
workerGroup: this.workerGroup,
@ -634,6 +651,7 @@
this.description = o.description
this.maxRetryTimes = o.maxRetryTimes
this.retryInterval = o.retryInterval
this.delayTime = o.delayTime
if(o.conditionResult) {
this.successBranch = o.conditionResult.successNode[0]
this.failedBranch = o.conditionResult.failedNode[0]
@ -699,6 +717,7 @@
dependence: this.cacheDependence,
maxRetryTimes: this.maxRetryTimes,
retryInterval: this.retryInterval,
delayTime: this.delayTime,
timeout: this.timeout,
taskInstancePriority: this.taskInstancePriority,
workerGroup: this.workerGroup,

3
dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/_source/instanceConditions/common.js

@ -60,6 +60,9 @@ const stateType = [
}, {
code: 'WAITTING_DEPEND',
label: `${i18n.$t('Waiting for dependency to complete')}`
}, {
code: 'DELAY_EXECUTION',
label: `${i18n.$t('Delay execution')}`
}
]

147
dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/index/_source/taskStatusCount.vue

@ -0,0 +1,147 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
<template>
<div class="task-status-count-model">
<div v-show="!msg">
<div class="data-area" v-spin="isSpin" style="height: 430px;">
<div class="col-md-7">
<div id="task-status-pie" style="height:260px;margin-top: 100px;"></div>
</div>
<div class="col-md-5">
<div class="table-small-model">
<table>
<tr>
<th width="40">{{$t('#')}}</th>
<th>{{$t('Number')}}</th>
<th>{{$t('State')}}</th>
</tr>
<tr v-for="(item,$index) in taskStatusList" :key="$index">
<td><span>{{$index+1}}</span></td>
<td>
<span>
<a href="javascript:" @click="searchParams.projectId && _goTask(item.key)" :class="searchParams.projectId ?'links':''">{{item.value}}</a>
</span>
</td>
<td><span class="ellipsis" style="width: 98%;" :title="item.key">{{item.key}}</span></td>
</tr>
</table>
</div>
</div>
</div>
</div>
<div v-show="msg">
<m-no-data :msg="msg" v-if="msg" :height="430"></m-no-data>
</div>
</div>
</template>
<script>
import _ from 'lodash'
import { mapActions } from 'vuex'
import { pie } from './chartConfig'
import Chart from '@/module/ana-charts'
import mNoData from '@/module/components/noData/noData'
import { stateType } from '@/conf/home/pages/projects/pages/_source/instanceConditions/common'
export default {
name: 'task-status-count',
data () {
return {
isSpin: true,
msg: '',
taskStatusList: []
}
},
props: {
searchParams: Object
},
methods: {
...mapActions('projects', ['getTaskStatusCount']),
_goTask (name) {
this.$router.push({
name: 'task-instance',
query: {
stateType: _.find(stateType, ['label', name])['code'],
startDate: this.searchParams.startDate,
endDate: this.searchParams.endDate
}
})
},
_handleTaskStatus (res) {
let data = res.data.taskCountDtos
this.taskStatusList = _.map(data, v => {
return {
// CHECK!!
key: _.find(stateType, ['code', v.taskStateType])['label'],
value: v.count,
type: 'type'
}
})
const myChart = Chart.pie('#task-status-pie', this.taskStatusList, { title: '' })
myChart.echart.setOption(pie)
// Jump forbidden in index page
if (this.searchParams.projectId) {
myChart.echart.on('click', e => {
this._goTask(e.data.name)
})
}
}
},
watch: {
'searchParams': {
deep: true,
immediate: true,
handler (o) {
this.isSpin = true
this.getTaskStatusCount(o).then(res => {
this.taskStatusList = []
this._handleTaskStatus(res)
this.isSpin = false
}).catch(e => {
console.log(e)
this.msg = e.msg || 'error'
this.isSpin = false
})
}
}
},
beforeCreate () {
},
created () {
},
beforeMount () {
},
mounted () {
},
beforeUpdate () {
},
updated () {
},
beforeDestroy () {
},
destroyed () {
},
computed: {},
components: { mNoData }
}
</script>
<style lang="scss" rel="stylesheet/scss">
.task-status-count-model {
}
</style>

8
dolphinscheduler-ui/src/js/conf/home/pages/projects/pages/index/index.vue

@ -35,8 +35,8 @@
<span>{{$t('Task status statistics')}}</span>
</div>
<div class="row">
<m-task-ctatus-count :search-params="searchParams">
</m-task-ctatus-count>
<m-task-status-count :search-params="searchParams">
</m-task-status-count>
</div>
</div>
<div class="col-md-6">
@ -68,7 +68,7 @@
import dayjs from 'dayjs'
import mDefineUserCount from './_source/defineUserCount'
import mCommandStateCount from './_source/commandStateCount'
import mTaskCtatusCount from './_source/taskCtatusCount'
import mTaskStatusCount from './_source/taskStatusCount'
import mProcessStateCount from './_source/processStateCount'
import mQueueCount from './_source/queueCount'
import localStore from '@/module/util/localStorage'
@ -105,7 +105,7 @@
mListConstruction,
mDefineUserCount,
mCommandStateCount,
mTaskCtatusCount,
mTaskStatusCount,
mProcessStateCount,
mQueueCount
}

2
dolphinscheduler-ui/src/js/conf/home/store/projects/actions.js

@ -69,7 +69,7 @@ export default {
/**
* Task status statistics
*/
getTaskCtatusCount ({ state }, payload) {
getTaskStatusCount ({ state }, payload) {
return new Promise((resolve, reject) => {
io.get('projects/analysis/task-state-count', payload, res => {
resolve(res)

2
dolphinscheduler-ui/src/js/module/i18n/locale/en_US.js

@ -42,6 +42,8 @@ export default {
Times: 'Times',
'Failed retry interval': 'Failed retry interval',
Minute: 'Minute',
'Delay execution time': 'Delay execution time',
'Delay execution': 'Delay execution',
Cancel: 'Cancel',
'Confirm add': 'Confirm add',
'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': 'The newly created sub-Process has not yet been executed and cannot enter the sub-Process',

2
dolphinscheduler-ui/src/js/module/i18n/locale/zh_CN.js

@ -43,6 +43,7 @@ export default {
Times: '次',
'Failed retry interval': '失败重试间隔',
Minute: '分',
'Delay execution time': '延时执行时间',
Cancel: '取消',
'Confirm add': '确认添加',
'The newly created sub-Process has not yet been executed and cannot enter the sub-Process': '新创建子工作流还未执行不能进入子工作流',
@ -425,6 +426,7 @@ export default {
hour: '时',
Running: '正在运行',
'Waiting for dependency to complete': '等待依赖完成',
'Delay execution': '延时执行',
Selected: '已选',
CurrentHour: '当前小时',
Last1Hour: '前1小时',

40
e2e/src/test/java/org/apache/dolphinscheduler/common/BrowserCommon.java

@ -223,22 +223,46 @@ public class BrowserCommon {
/**
* mouse drag element
*
* @param source_locator BY
* @param target_locator BY
* @param sourceLocator BY
* @param targetLocator BY
*/
public void dragAndDrop(By source_locator, By target_locator) {
WebElement sourceElement = locateElement(source_locator);
WebElement targetElement = locateElement(target_locator);
public void dragAndDrop(By sourceLocator, By targetLocator) {
WebElement sourceElement = locateElement(sourceLocator);
WebElement targetElement = locateElement(targetLocator);
actions.dragAndDrop(sourceElement, targetElement).perform();
actions.release();
}
public void moveToDragElement(By target_locator, int X, int Y) {
WebElement targetElement = locateElement(target_locator);
actions.dragAndDropBy(targetElement, X, Y).perform();
public void moveToDragElement(By targetLocator, int x, int y) {
WebElement targetElement = locateElement(targetLocator);
actions.dragAndDropBy(targetElement, x, y).perform();
actions.release();
}
/**
* Right mouse click on the element
*
* @param locator By
*/
public void mouseRightClickElement(By locator) {
WebElement mouseRightClickElement = locateElement(locator);
actions.contextClick(mouseRightClickElement).perform();
}
/**
* Move the mouse from a source position to a specified target position
*
* @param sourceLocator BY
* @param targetLocator BY
*/
public void mouseMovePosition(By sourceLocator, By targetLocator) throws InterruptedException {
WebElement sourceElement = locateElement(sourceLocator);
WebElement targetElement = locateElement(targetLocator);
actions.dragAndDrop(sourceElement,targetElement).perform();
actions.click();
}
/**
* jump page

12
e2e/src/test/java/org/apache/dolphinscheduler/locator/project/WorkflowDefineLocator.java

@ -120,12 +120,22 @@ public class WorkflowDefineLocator {
//click submit button
public static final By CLICK_SUBMIT_BUTTON = By.xpath("//div[3]/div/button[2]/span");
//copy task
public static final By MOUSE_RIGHT_CLICK = By.xpath("//div[2]/div[2]/div/div/div/div/div[2]");
public static final By COPY_TASK = By.xpath("//a[3]/span");
//click line
public static final By CLICK_LINE = By.xpath("//a[@id='line']/button/i");
public static final By LINE_SOURCES_TASK = By.xpath("//div[@id='canvas']/div[1]/div[2]");
public static final By LINE_TARGET_TASK = By.xpath("//div[@id='canvas']/div[2]/div[2]");
/**
* save workflow
*/
//click save workflow button
public static final By CLICK_SAVE_WORKFLOW_BUTTON = By.xpath("//div[2]/div[1]/div[2]/button[2]");
public static final By CLICK_SAVE_WORKFLOW_BUTTON = By.xpath("//div[2]/div[1]/div[2]/button[2]/span");
//input workflow name
public static final By INPUT_WORKFLOW_NAME = By.xpath("//input");

5
e2e/src/test/java/org/apache/dolphinscheduler/page/project/WorkflowDefinePage.java

@ -131,6 +131,11 @@ public class WorkflowDefinePage extends PageCommon {
System.out.println("move to Dag Element ");
moveToDragElement(WorkflowDefineLocator.MOUSE_MOVE_SHELL_AT_DAG,-300,-100);
System.out.println("copy task");
mouseRightClickElement(WorkflowDefineLocator.MOUSE_RIGHT_CLICK);
clickButton(WorkflowDefineLocator.COPY_TASK);
clickButton(WorkflowDefineLocator.CLICK_LINE);
mouseMovePosition(WorkflowDefineLocator.LINE_SOURCES_TASK,WorkflowDefineLocator.LINE_TARGET_TASK);
return ifTitleContains(WorkflowDefineData.CREATE_WORKFLOW_TITLE);
}

5
pom.xml

@ -792,11 +792,13 @@
<include>**/common/utils/RetryerUtilsTest.java</include>
<include>**/common/plugin/FilePluginManagerTest</include>
<include>**/common/plugin/PluginClassLoaderTest</include>
<include>**/common/enums/ExecutionStatusTest</include>
<include>**/dao/mapper/AccessTokenMapperTest.java</include>
<include>**/dao/mapper/AlertGroupMapperTest.java</include>
<include>**/dao/mapper/CommandMapperTest.java</include>
<include>**/dao/mapper/ConnectionFactoryTest.java</include>
<include>**/dao/mapper/DataSourceMapperTest.java</include>
<include>**/dao/entity/TaskInstanceTest.java</include>
<include>**/dao/entity/UdfFuncTest.java</include>
<include>**/remote/JsonSerializerTest.java</include>
<include>**/remote/RemoveTaskLogResponseCommandTest.java</include>
@ -820,7 +822,7 @@
<include>**/server/master/AlertManagerTest.java</include>
<include>**/server/master/MasterCommandTest.java</include>
<include>**/server/master/DependentTaskTest.java</include>
<!--<include>**/server/master/ConditionsTaskTest.java</include>-->
<include>**/server/master/ConditionsTaskTest.java</include>
<include>**/server/master/MasterExecThreadTest.java</include>
<include>**/server/master/ParamsTest.java</include>
<include>**/server/register/ZookeeperNodeManagerTest.java</include>
@ -841,6 +843,7 @@
<!--<include>**/server/worker/task/http/HttpTaskTest.java</include>-->
<include>**/server/worker/task/sqoop/SqoopTaskTest.java</include>
<include>**/server/worker/EnvFileTest.java</include>
<include>**/server/worker/runner/TaskExecuteThreadTest.java</include>
<include>**/service/quartz/cron/CronUtilsTest.java</include>
<include>**/service/zk/DefaultEnsembleProviderTest.java</include>
<include>**/service/zk/ZKServerTest.java</include>

4
sql/dolphinscheduler-postgre.sql

@ -566,8 +566,10 @@ CREATE TABLE t_ds_task_instance (
retry_interval int DEFAULT NULL ,
max_retry_times int DEFAULT NULL ,
task_instance_priority int DEFAULT NULL ,
worker_group varchar(64),
worker_group varchar(64),
executor_id int DEFAULT NULL ,
first_submit_time timestamp DEFAULT NULL ,
delay_time int DEFAULT '0' ,
PRIMARY KEY (id)
) ;

2
sql/dolphinscheduler_mysql.sql

@ -707,6 +707,8 @@ CREATE TABLE `t_ds_task_instance` (
`task_instance_priority` int(11) DEFAULT NULL COMMENT 'task instance priority:0 Highest,1 High,2 Medium,3 Low,4 Lowest',
`worker_group` varchar(64) DEFAULT NULL COMMENT 'worker group id',
`executor_id` int(11) DEFAULT NULL,
`first_submit_time` datetime DEFAULT NULL COMMENT 'task first submit time',
`delay_time` int(4) DEFAULT '0' COMMENT 'task delay execution time',
PRIMARY KEY (`id`),
KEY `process_instance_id` (`process_instance_id`) USING BTREE,
KEY `task_instance_index` (`process_definition_id`,`process_instance_id`) USING BTREE,

1
sql/upgrade/1.3.0_schema/mysql/dolphinscheduler_ddl.sql

@ -297,4 +297,3 @@ delimiter ;
CALL ac_dolphin_T_t_ds_user_A_state;
DROP PROCEDURE ac_dolphin_T_t_ds_user_A_state;

58
sql/upgrade/1.3.3_schema/mysql/dolphinscheduler_ddl.sql

@ -0,0 +1,58 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
SET sql_mode=(SELECT REPLACE(@@sql_mode,'ONLY_FULL_GROUP_BY',''));
-- uc_dolphin_T_t_ds_task_instance_A_first_submit_time
drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_task_instance_A_first_submit_time;
delimiter d//
CREATE PROCEDURE uc_dolphin_T_t_ds_task_instance_A_first_submit_time()
BEGIN
IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS
WHERE TABLE_NAME='t_ds_task_instance'
AND TABLE_SCHEMA=(SELECT DATABASE())
AND COLUMN_NAME ='first_submit_time')
THEN
ALTER TABLE t_ds_task_instance ADD `first_submit_time` datetime DEFAULT NULL COMMENT 'task first submit time';
END IF;
END;
d//
delimiter ;
CALL uc_dolphin_T_t_ds_task_instance_A_first_submit_time();
DROP PROCEDURE uc_dolphin_T_t_ds_task_instance_A_first_submit_time;
-- uc_dolphin_T_t_ds_task_instance_A_delay_time
drop PROCEDURE if EXISTS uc_dolphin_T_t_ds_task_instance_A_delay_time;
delimiter d//
CREATE PROCEDURE uc_dolphin_T_t_ds_task_instance_A_delay_time()
BEGIN
IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS
WHERE TABLE_NAME='t_ds_task_instance'
AND TABLE_SCHEMA=(SELECT DATABASE())
AND COLUMN_NAME ='delay_time')
THEN
ALTER TABLE t_ds_task_instance ADD `delay_time` int(4) DEFAULT '0' COMMENT 'task delay execution time';
END IF;
END;
d//
delimiter ;
CALL uc_dolphin_T_t_ds_task_instance_A_delay_time();
DROP PROCEDURE uc_dolphin_T_t_ds_task_instance_A_delay_time;

16
sql/upgrade/1.3.3_schema/mysql/dolphinscheduler_dml.sql

@ -0,0 +1,16 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

52
sql/upgrade/1.3.3_schema/postgresql/dolphinscheduler_ddl.sql

@ -0,0 +1,52 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-- uc_dolphin_T_t_ds_task_instance_A_first_submit_time
delimiter d//
CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_task_instance_A_first_submit_time() RETURNS void AS $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS
WHERE TABLE_NAME='t_ds_task_instance'
AND COLUMN_NAME ='first_submit_time')
THEN
ALTER TABLE t_ds_task_instance ADD COLUMN first_submit_time timestamp DEFAULT NULL;
END IF;
END;
$$ LANGUAGE plpgsql;
d//
delimiter ;
SELECT uc_dolphin_T_t_ds_task_instance_A_first_submit_time();
DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_task_instance_A_first_submit_time();
-- uc_dolphin_T_t_ds_task_instance_A_delay_time
delimiter d//
CREATE OR REPLACE FUNCTION uc_dolphin_T_t_ds_task_instance_A_delay_time() RETURNS void AS $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM information_schema.COLUMNS
WHERE TABLE_NAME='t_ds_task_instance'
AND COLUMN_NAME ='delay_time')
THEN
ALTER TABLE t_ds_task_instance ADD COLUMN delay_time int DEFAULT '0';
END IF;
END;
$$ LANGUAGE plpgsql;
d//
delimiter ;
SELECT uc_dolphin_T_t_ds_task_instance_A_delay_time();
DROP FUNCTION IF EXISTS uc_dolphin_T_t_ds_task_instance_A_delay_time();

16
sql/upgrade/1.3.3_schema/postgresql/dolphinscheduler_dml.sql

@ -0,0 +1,16 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

3
style/intellij-java-code-style.xml

@ -95,4 +95,7 @@
<option name="JD_KEEP_EMPTY_EXCEPTION" value="false" />
<option name="JD_KEEP_EMPTY_RETURN" value="false" />
</JavaCodeStyleSettings>
<codeStyleSettings language="JAVA">
<option name="BINARY_OPERATION_SIGN_ON_NEXT_LINE" value="true" />
</codeStyleSettings>
</code_scheme>