diff --git a/escheduler-api/src/main/java/cn/escheduler/api/controller/ProcessDefinitionController.java b/escheduler-api/src/main/java/cn/escheduler/api/controller/ProcessDefinitionController.java index 0ca71da70d..8b28925a43 100644 --- a/escheduler-api/src/main/java/cn/escheduler/api/controller/ProcessDefinitionController.java +++ b/escheduler-api/src/main/java/cn/escheduler/api/controller/ProcessDefinitionController.java @@ -350,4 +350,29 @@ public class ProcessDefinitionController extends BaseController{ } } + /** + * batch delete process definition by ids + * + * @param loginUser + * @param projectName + * @param processDefinitionIds + * @return + */ + @GetMapping(value="/batch-delete") + @ResponseStatus(HttpStatus.OK) + public Result batchDeleteProcessDefinitionByIds(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @PathVariable String projectName, + @RequestParam("processDefinitionIds") String processDefinitionIds + ){ + try{ + logger.info("delete process definition by ids, login user:{}, project name:{}, process definition ids:{}", + loginUser.getUserName(), projectName, processDefinitionIds); + Map result = processDefinitionService.batchDeleteProcessDefinitionByIds(loginUser, projectName, processDefinitionIds); + return returnDataList(result); + }catch (Exception e){ + logger.error(BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR.getMsg(),e); + return error(Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR.getCode(), Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR.getMsg()); + } + } + } diff --git a/escheduler-api/src/main/java/cn/escheduler/api/controller/ProcessInstanceController.java b/escheduler-api/src/main/java/cn/escheduler/api/controller/ProcessInstanceController.java index 2ae21ffb89..cea47be2c6 100644 --- a/escheduler-api/src/main/java/cn/escheduler/api/controller/ProcessInstanceController.java +++ b/escheduler-api/src/main/java/cn/escheduler/api/controller/ProcessInstanceController.java @@ -22,6 +22,8 @@ import cn.escheduler.api.utils.Constants; import cn.escheduler.api.utils.Result; import cn.escheduler.common.enums.ExecutionStatus; import cn.escheduler.common.enums.Flag; +import cn.escheduler.common.queue.ITaskQueue; +import cn.escheduler.common.queue.TaskQueueFactory; import cn.escheduler.common.utils.ParameterUtils; import cn.escheduler.dao.model.User; import org.slf4j.Logger; @@ -189,7 +191,9 @@ public class ProcessInstanceController extends BaseController{ try{ logger.info("delete process instance by id, login user:{}, project name:{}, process instance id:{}", loginUser.getUserName(), projectName, processInstanceId); - Map result = processInstanceService.deleteProcessInstanceById(loginUser, projectName, processInstanceId); + // task queue + ITaskQueue tasksQueue = TaskQueueFactory.getTaskQueueInstance(); + Map result = processInstanceService.deleteProcessInstanceById(loginUser, projectName, processInstanceId,tasksQueue); return returnDataList(result); }catch (Exception e){ logger.error(DELETE_PROCESS_INSTANCE_BY_ID_ERROR.getMsg(),e); @@ -282,4 +286,30 @@ public class ProcessInstanceController extends BaseController{ return error(Status.ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR.getCode(),ENCAPSULATION_PROCESS_INSTANCE_GANTT_STRUCTURE_ERROR.getMsg()); } } + + /** + * batch delete process instance by ids, at the same time, + * delete task instance and their mapping relation data + * + * @param loginUser + * @param projectName + * @param processInstanceIds + * @return + */ + @GetMapping(value="/batch-delete") + @ResponseStatus(HttpStatus.OK) + 
public Result batchDeleteProcessInstanceByIds(@RequestAttribute(value = Constants.SESSION_USER) User loginUser, + @PathVariable String projectName, + @RequestParam("processInstanceIds") String processInstanceIds + ){ + try{ + logger.info("delete process instance by ids, login user:{}, project name:{}, process instance ids :{}", + loginUser.getUserName(), projectName, processInstanceIds); + Map result = processInstanceService.batchDeleteProcessInstanceByIds(loginUser, projectName, processInstanceIds); + return returnDataList(result); + }catch (Exception e){ + logger.error(BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR.getMsg(),e); + return error(Status.BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR.getCode(), Status.BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR.getMsg()); + } + } } diff --git a/escheduler-api/src/main/java/cn/escheduler/api/controller/SchedulerController.java b/escheduler-api/src/main/java/cn/escheduler/api/controller/SchedulerController.java index 5433ff0467..b0b7412dd8 100644 --- a/escheduler-api/src/main/java/cn/escheduler/api/controller/SchedulerController.java +++ b/escheduler-api/src/main/java/cn/escheduler/api/controller/SchedulerController.java @@ -232,4 +232,29 @@ public class SchedulerController extends BaseController{ return error(Status.QUERY_SCHEDULE_LIST_ERROR.getCode(), Status.QUERY_SCHEDULE_LIST_ERROR.getMsg()); } } + + /** + * delete schedule by id + * + * @param loginUser + * @param projectName + * @param scheduleId + * @return + */ + @GetMapping(value="/delete") + @ResponseStatus(HttpStatus.OK) + public Result deleteScheduleById(@RequestAttribute(value = SESSION_USER) User loginUser, + @PathVariable String projectName, + @RequestParam("scheduleId") Integer scheduleId + ){ + try{ + logger.info("delete schedule by id, login user:{}, project name:{}, schedule id:{}", + loginUser.getUserName(), projectName, scheduleId); + Map result = schedulerService.deleteScheduleById(loginUser, projectName, scheduleId); + return returnDataList(result); + }catch (Exception e){ + logger.error(DELETE_SCHEDULE_CRON_BY_ID_ERROR.getMsg(),e); + return error(Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR.getCode(), Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR.getMsg()); + } + } } diff --git a/escheduler-api/src/main/java/cn/escheduler/api/enums/Status.java b/escheduler-api/src/main/java/cn/escheduler/api/enums/Status.java index d90cf12858..6e25d91825 100644 --- a/escheduler-api/src/main/java/cn/escheduler/api/enums/Status.java +++ b/escheduler-api/src/main/java/cn/escheduler/api/enums/Status.java @@ -160,6 +160,7 @@ public enum Status { NAME_EXIST(10135, "name {0} already exists"), SAVE_ERROR(10136, "save error"), DELETE_PROJECT_ERROR_DEFINES_NOT_NULL(10137, "please delete the process definitions in project first!"), + BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR(10117,"batch delete process instance by ids {0} error"), @@ -204,8 +205,13 @@ public enum Status { PROCESS_DEFINE_STATE_ONLINE(50021, "process definition {0} is already on line"), DELETE_PROCESS_DEFINE_BY_ID_ERROR(50022,"delete process definition by id error"), SCHEDULE_CRON_STATE_ONLINE(50023,"the status of schedule {0} is already on line"), + DELETE_SCHEDULE_CRON_BY_ID_ERROR(50024,"delete schedule by id error"), + BATCH_DELETE_PROCESS_DEFINE_ERROR(50025,"batch delete process definition error"), + BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR(50026,"batch delete process definition by ids {0} error"), HDFS_NOT_STARTUP(60001,"hdfs not startup"), + HDFS_TERANT_RESOURCES_FILE_EXISTS(60002,"resource file exists,please delete resource first"), + 
HDFS_TERANT_UDFS_FILE_EXISTS(60003,"udf file exists, please delete udf file first"), /** * for monitor diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/ProcessDefinitionService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/ProcessDefinitionService.java index cfa3814616..7b20f516db 100644 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/ProcessDefinitionService.java +++ b/escheduler-api/src/main/java/cn/escheduler/api/service/ProcessDefinitionService.java @@ -38,6 +38,7 @@ import cn.escheduler.dao.mapper.*; import cn.escheduler.dao.model.*; import com.alibaba.fastjson.JSON; import com.fasterxml.jackson.core.JsonProcessingException; +import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -356,13 +357,19 @@ public class ProcessDefinitionService extends BaseDAGService { return checkResult; } - ProcessDefinition processDefinition = processDefineMapper.queryByDefineId(processDefinitionId); if (processDefinition == null) { putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefinitionId); return result; } + + // Determine if the login user is the owner of the process definition + if (loginUser.getId() != processDefinition.getUserId()) { + putMsg(result, Status.USER_NO_OPERATION_PERM); + return result; + } + // check process definition is already online if (processDefinition.getReleaseState() == ReleaseState.ONLINE) { putMsg(result, Status.PROCESS_DEFINE_STATE_ONLINE,processDefinitionId); @@ -370,7 +377,7 @@ public class ProcessDefinitionService extends BaseDAGService { } // get the timing according to the process definition - List schedules = scheduleMapper.selectAllByProcessDefineArray(new int[processDefinitionId]); + List schedules = scheduleMapper.queryByProcessDefinitionId(processDefinitionId); if (!schedules.isEmpty() && schedules.size() > 1) { logger.warn("scheduler num is {},Greater than 1",schedules.size()); putMsg(result, Status.DELETE_PROCESS_DEFINE_BY_ID_ERROR); @@ -395,6 +402,55 @@ public class ProcessDefinitionService extends BaseDAGService { return result; } + /** + * batch delete process definition by ids + * + * @param loginUser + * @param projectName + * @param processDefinitionIds + * @return + */ + public Map batchDeleteProcessDefinitionByIds(User loginUser, String projectName, String processDefinitionIds) { + + Map result = new HashMap<>(5); + + Map deleteResult = new HashMap<>(5); + + List deleteFailedIdList = new ArrayList(); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + + + if(StringUtils.isNotEmpty(processDefinitionIds)){ + String[] processDefinitionIdArray = processDefinitionIds.split(","); + + for (String strProcessDefinitionId:processDefinitionIdArray) { + int processDefinitionId = Integer.parseInt(strProcessDefinitionId); + try { + deleteResult = deleteProcessDefinitionById(loginUser, projectName, processDefinitionId); + if(!Status.SUCCESS.equals(deleteResult.get(Constants.STATUS))){ + deleteFailedIdList.add(processDefinitionId); + logger.error((String)deleteResult.get(Constants.MSG)); + } + } catch (Exception e) { + deleteFailedIdList.add(processDefinitionId); + } + } + } + + if(deleteFailedIdList.size() > 0){ + putMsg(result, 
Status.BATCH_DELETE_PROCESS_DEFINE_BY_IDS_ERROR,StringUtils.join(deleteFailedIdList.toArray(),",")); + }else{ + putMsg(result, Status.SUCCESS); + } + return result; + } + /** * release process definition: online / offline * diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/ProcessInstanceService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/ProcessInstanceService.java index 43d5377da2..6475b42cee 100644 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/ProcessInstanceService.java +++ b/escheduler-api/src/main/java/cn/escheduler/api/service/ProcessInstanceService.java @@ -30,6 +30,8 @@ import cn.escheduler.common.graph.DAG; import cn.escheduler.common.model.TaskNode; import cn.escheduler.common.model.TaskNodeRelation; import cn.escheduler.common.process.Property; +import cn.escheduler.common.queue.ITaskQueue; +import cn.escheduler.common.queue.TaskQueueFactory; import cn.escheduler.common.utils.CollectionUtils; import cn.escheduler.common.utils.DateUtils; import cn.escheduler.common.utils.JSONUtils; @@ -446,13 +448,13 @@ public class ProcessInstanceService extends BaseDAGService { /** * delete process instance by id, at the same time,delete task instance and their mapping relation data - * * @param loginUser * @param projectName - * @param workflowId + * @param processInstanceId + * @param tasksQueue * @return */ - public Map deleteProcessInstanceById(User loginUser, String projectName, Integer workflowId) { + public Map deleteProcessInstanceById(User loginUser, String projectName, Integer processInstanceId,ITaskQueue tasksQueue) { Map result = new HashMap<>(5); Project project = projectMapper.queryByName(projectName); @@ -462,16 +464,34 @@ if (resultEnum != Status.SUCCESS) { return checkResult; } - ProcessInstance processInstance = processDao.findProcessInstanceDetailById(workflowId); + ProcessInstance processInstance = processDao.findProcessInstanceDetailById(processInstanceId); if (processInstance == null) { - putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, workflowId); + putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); return result; } + List taskInstanceList = processDao.findValidTaskListByProcessId(processInstanceId); + //process instance priority + int processInstancePriority = processInstance.getProcessInstancePriority().ordinal(); - int delete = processDao.deleteWorkProcessInstanceById(workflowId); - processDao.deleteAllSubWorkProcessByParentId(workflowId); - processDao.deleteWorkProcessMapByParentId(workflowId); + + int delete = processDao.deleteWorkProcessInstanceById(processInstanceId); + processDao.deleteAllSubWorkProcessByParentId(processInstanceId); + processDao.deleteWorkProcessMapByParentId(processInstanceId); if (delete > 0) { + if (CollectionUtils.isNotEmpty(taskInstanceList)){ + for (TaskInstance taskInstance : taskInstanceList){ + // task instance priority + int taskInstancePriority = taskInstance.getTaskInstancePriority().ordinal(); + String nodeValue=processInstancePriority + "_" + processInstanceId + "_" +taskInstancePriority + "_" + taskInstance.getId(); + try { + logger.info("delete task queue node : {}",nodeValue); + tasksQueue.removeNode(cn.escheduler.common.Constants.SCHEDULER_TASKS_QUEUE, nodeValue); +}catch (Exception e){ + logger.error("delete task queue node error, node : {}", nodeValue, e); + } + } + } + putMsg(result, Status.SUCCESS); } else { putMsg(result, Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR); @@ -479,6 +499,50 @@ public class 
ProcessInstanceService extends BaseDAGService { return result; } + /** + * batch delete process instance by ids, at the same time,delete task instance and their mapping relation data + * + * @param loginUser + * @param projectName + * @param processInstanceIds + * @return + */ + public Map batchDeleteProcessInstanceByIds(User loginUser, String projectName, String processInstanceIds) { + // task queue + ITaskQueue tasksQueue = TaskQueueFactory.getTaskQueueInstance(); + + Map result = new HashMap<>(5); + List deleteFailedIdList = new ArrayList(); + + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + + if(StringUtils.isNotEmpty(processInstanceIds)){ + String[] processInstanceIdArray = processInstanceIds.split(","); + + for (String strProcessInstanceId:processInstanceIdArray) { + int processInstanceId = Integer.parseInt(strProcessInstanceId); + try { + deleteProcessInstanceById(loginUser, projectName, processInstanceId,tasksQueue); + } catch (Exception e) { + deleteFailedIdList.add(processInstanceId); + } + } + } + if(deleteFailedIdList.size() > 0){ + putMsg(result, Status.BATCH_DELETE_PROCESS_INSTANCE_BY_IDS_ERROR,StringUtils.join(deleteFailedIdList.toArray(),",")); + }else{ + putMsg(result, Status.SUCCESS); + } + + return result; + } + /** * view process instance variables * diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/SchedulerService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/SchedulerService.java index 232b9d7b15..f2cf8e1905 100644 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/SchedulerService.java +++ b/escheduler-api/src/main/java/cn/escheduler/api/service/SchedulerService.java @@ -488,4 +488,46 @@ public class SchedulerService extends BaseService { } return null; } + + /** + * delete schedule by id + * + * @param loginUser + * @param projectName + * @param scheduleId + * @return + */ + public Map deleteScheduleById(User loginUser, String projectName, Integer scheduleId) { + + Map result = new HashMap<>(5); + Project project = projectMapper.queryByName(projectName); + + Map checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); + Status resultEnum = (Status) checkResult.get(Constants.STATUS); + if (resultEnum != Status.SUCCESS) { + return checkResult; + } + + Schedule schedule = scheduleMapper.queryById(scheduleId); + + if (schedule == null) { + putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, scheduleId); + return result; + } + // check schedule is already online + if(schedule.getReleaseState() == ReleaseState.ONLINE){ + putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE,schedule.getId()); + return result; + } + + + int delete = scheduleMapper.delete(scheduleId); + + if (delete > 0) { + putMsg(result, Status.SUCCESS); + } else { + putMsg(result, Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR); + } + return result; + } } \ No newline at end of file diff --git a/escheduler-api/src/main/java/cn/escheduler/api/service/TenantService.java b/escheduler-api/src/main/java/cn/escheduler/api/service/TenantService.java index 9fa8466175..68fbc55348 100644 --- a/escheduler-api/src/main/java/cn/escheduler/api/service/TenantService.java +++ b/escheduler-api/src/main/java/cn/escheduler/api/service/TenantService.java @@ -26,6 +26,7 @@ import cn.escheduler.dao.mapper.TenantMapper; import 
cn.escheduler.dao.model.Tenant; import cn.escheduler.dao.model.User; import org.apache.commons.lang3.StringUtils; +import org.apache.hadoop.fs.FileStatus; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -219,6 +220,7 @@ public class TenantService extends BaseService{ * @param id * @return */ + @Transactional(value = "TransactionManager", rollbackFor = Exception.class) public Map deleteTenantById(User loginUser, int id) throws Exception { Map result = new HashMap<>(5); @@ -229,6 +231,19 @@ public class TenantService extends BaseService{ Tenant tenant = tenantMapper.queryById(id); String tenantPath = HadoopUtils.getHdfsDataBasePath() + "/" + tenant.getTenantCode(); + + String resourcePath = HadoopUtils.getHdfsDir(tenant.getTenantCode()); + FileStatus[] fileStatus = HadoopUtils.getInstance().listFileStatus(resourcePath); + if (fileStatus.length > 0) { + putMsg(result, Status.HDFS_TERANT_RESOURCES_FILE_EXISTS); + return result; + } + fileStatus = HadoopUtils.getInstance().listFileStatus(HadoopUtils.getHdfsUdfDir(tenant.getTenantCode())); + if (fileStatus.length > 0) { + putMsg(result, Status.HDFS_TERANT_UDFS_FILE_EXISTS); + return result; + } + HadoopUtils.getInstance().delete(tenantPath, true); tenantMapper.deleteById(id); diff --git a/escheduler-api/src/test/java/cn/escheduler/api/service/ProcessDefinitionServiceTest.java b/escheduler-api/src/test/java/cn/escheduler/api/service/ProcessDefinitionServiceTest.java index 48dcd043fa..4de3cbc103 100644 --- a/escheduler-api/src/test/java/cn/escheduler/api/service/ProcessDefinitionServiceTest.java +++ b/escheduler-api/src/test/java/cn/escheduler/api/service/ProcessDefinitionServiceTest.java @@ -75,4 +75,15 @@ public class ProcessDefinitionServiceTest { Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS)); logger.info(JSON.toJSONString(map)); } + + @Test + public void batchDeleteProcessDefinitionByIds() throws Exception { + + User loginUser = new User(); + loginUser.setId(2); + loginUser.setUserType(UserType.GENERAL_USER); + Map map = processDefinitionService.batchDeleteProcessDefinitionByIds(loginUser, "li_test_1", "2,3"); + Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS)); + logger.info(JSON.toJSONString(map)); + } } \ No newline at end of file diff --git a/escheduler-api/src/test/java/cn/escheduler/api/service/ProcessInstanceServiceTest.java b/escheduler-api/src/test/java/cn/escheduler/api/service/ProcessInstanceServiceTest.java index 4fff4f184f..532c08df54 100644 --- a/escheduler-api/src/test/java/cn/escheduler/api/service/ProcessInstanceServiceTest.java +++ b/escheduler-api/src/test/java/cn/escheduler/api/service/ProcessInstanceServiceTest.java @@ -75,4 +75,16 @@ public class ProcessInstanceServiceTest { Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS)); logger.info(JSON.toJSONString(map)); } + + @Test + public void batchDeleteProcessInstanceByIds() throws Exception { + + User loginUser = new User(); + loginUser.setId(2); + loginUser.setUserType(UserType.GENERAL_USER); + Map map = processInstanceService.batchDeleteProcessInstanceByIds(loginUser, "li_test_1", "4,2,300"); + + Assert.assertEquals(Status.SUCCESS, map.get(Constants.STATUS)); + logger.info(JSON.toJSONString(map)); + } } \ No newline at end of file diff --git a/escheduler-common/src/main/java/cn/escheduler/common/task/sql/SqlParameters.java b/escheduler-common/src/main/java/cn/escheduler/common/task/sql/SqlParameters.java index ae9d8b6904..0dc54b4c70 100644 --- 
a/escheduler-common/src/main/java/cn/escheduler/common/task/sql/SqlParameters.java +++ b/escheduler-common/src/main/java/cn/escheduler/common/task/sql/SqlParameters.java @@ -73,6 +73,11 @@ public class SqlParameters extends AbstractParameters { */ private List postStatements; + /** + * title + */ + private String title; + /** * receivers */ @@ -139,6 +144,14 @@ public class SqlParameters extends AbstractParameters { this.connParams = connParams; } + public String getTitle() { + return title; + } + + public void setTitle(String title) { + this.title = title; + } + public String getReceivers() { return receivers; } @@ -190,6 +203,7 @@ public class SqlParameters extends AbstractParameters { ", udfs='" + udfs + '\'' + ", showType='" + showType + '\'' + ", connParams='" + connParams + '\'' + + ", title='" + title + '\'' + ", receivers='" + receivers + '\'' + ", receiversCc='" + receiversCc + '\'' + ", preStatements=" + preStatements + diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/HadoopUtils.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/HadoopUtils.java index b76d6347da..bba9e610fc 100644 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/HadoopUtils.java +++ b/escheduler-common/src/main/java/cn/escheduler/common/utils/HadoopUtils.java @@ -28,6 +28,7 @@ import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.yarn.client.cli.RMAdminCLI; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -263,6 +264,22 @@ public class HadoopUtils implements Closeable { return fs.exists(new Path(hdfsFilePath)); } + /** + * Gets a list of files in the directory + * + * @param filePath + * @return {@link FileStatus} + */ + public FileStatus[] listFileStatus(String filePath)throws Exception{ + Path path = new Path(filePath); + try { + return fs.listStatus(new Path(filePath)); + } catch (IOException e) { + logger.error("Get file list exception", e); + throw new Exception("Get file list exception", e); + } + } + /** * Renames Path src to Path dst. Can take place on local fs * or remote DFS. diff --git a/escheduler-common/src/main/java/cn/escheduler/common/utils/placeholder/BusinessTimeUtils.java b/escheduler-common/src/main/java/cn/escheduler/common/utils/placeholder/BusinessTimeUtils.java index 5dc996bcc5..c6c35d3d70 100644 --- a/escheduler-common/src/main/java/cn/escheduler/common/utils/placeholder/BusinessTimeUtils.java +++ b/escheduler-common/src/main/java/cn/escheduler/common/utils/placeholder/BusinessTimeUtils.java @@ -50,9 +50,16 @@ public class BusinessTimeUtils { case RECOVER_TOLERANCE_FAULT_PROCESS: case RECOVER_SUSPENDED_PROCESS: case START_FAILURE_TASK_PROCESS: + case REPEAT_RUNNING: case SCHEDULER: default: businessDate = addDays(new Date(), -1); + if (runTime != null){ + /** + * If there is a scheduled time, take the scheduling time. 
Recovery from failed nodes, suspension of recovery, re-run for scheduling + */ + businessDate = addDays(runTime, -1); + } break; } Date businessCurrentDate = addDays(businessDate, 1); diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/ProcessDao.java b/escheduler-dao/src/main/java/cn/escheduler/dao/ProcessDao.java index 54220f0ea9..670aa1485b 100644 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/ProcessDao.java +++ b/escheduler-dao/src/main/java/cn/escheduler/dao/ProcessDao.java @@ -1555,6 +1555,14 @@ public class ProcessDao extends AbstractBaseDao { } + public void selfFaultTolerant(int ... states){ + List processInstanceList = processInstanceMapper.listByStatus(states); + for (ProcessInstance processInstance:processInstanceList){ + selfFaultTolerant(processInstance); + } + + } + @Transactional(value = "TransactionManager",rollbackFor = Exception.class) public void selfFaultTolerant(ProcessInstance processInstance){ diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/DataSourceMapperProvider.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/DataSourceMapperProvider.java index 20203da5c6..73228057c2 100644 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/DataSourceMapperProvider.java +++ b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/DataSourceMapperProvider.java @@ -111,7 +111,7 @@ public class DataSourceMapperProvider { */ public String queryById(Map parameter) { return new SQL() {{ - SELECT("r.*,u.user_name as userName"); + SELECT("r.*,u.user_name"); FROM(TABLE_NAME + " r"); diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessDefinitionMapperProvider.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessDefinitionMapperProvider.java index 192e46aec2..a619ee4ad5 100644 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessDefinitionMapperProvider.java +++ b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessDefinitionMapperProvider.java @@ -134,7 +134,7 @@ public class ProcessDefinitionMapperProvider { public String queryByDefineId(Map parameter) { return new SQL() { { - SELECT("pd.*,u.user_name,p.name as projectName"); + SELECT("pd.*,u.user_name,p.name as project_name"); FROM(TABLE_NAME + " pd"); JOIN("t_escheduler_user u ON pd.user_id = u.id"); diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessInstanceMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessInstanceMapper.java index 03fbd6db27..c76b7bd950 100644 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessInstanceMapper.java +++ b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessInstanceMapper.java @@ -94,6 +94,7 @@ public interface ProcessInstanceMapper { @Result(property = "dependenceScheduleTimes", column = "dependence_schedule_times", javaType = String.class, jdbcType = JdbcType.VARCHAR), @Result(property = "duration", column = "duration", javaType = Long.class, jdbcType = JdbcType.BIGINT), @Result(property = "tenantCode", column = "tenant_code", javaType = String.class, jdbcType = JdbcType.VARCHAR), + @Result(property = "queue", column = "queue", javaType = String.class, jdbcType = JdbcType.VARCHAR), @Result(property = "workerGroupId", column = "worker_group_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER), @Result(property = "timeout", column = "timeout", javaType = Integer.class, jdbcType = JdbcType.INTEGER), @Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, 
typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT) diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessInstanceMapperProvider.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessInstanceMapperProvider.java index cd9daa3781..7e078b995b 100644 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessInstanceMapperProvider.java +++ b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProcessInstanceMapperProvider.java @@ -220,7 +220,7 @@ public class ProcessInstanceMapperProvider { public String queryDetailById(Map parameter) { return new SQL() { { - SELECT("inst.*,q.queue_name as queue,t.tenant_code as tenantCode,UNIX_TIMESTAMP(inst.end_time)-UNIX_TIMESTAMP(inst.start_time) as duration"); + SELECT("inst.*,q.queue_name as queue,t.tenant_code,UNIX_TIMESTAMP(inst.end_time)-UNIX_TIMESTAMP(inst.start_time) as duration"); FROM(TABLE_NAME + " inst, t_escheduler_user u,t_escheduler_tenant t,t_escheduler_queue q"); diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProjectMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProjectMapper.java index 0b044781a9..ab44b7444d 100644 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProjectMapper.java +++ b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProjectMapper.java @@ -67,6 +67,7 @@ public interface ProjectMapper { @Result(property = "userId", column = "user_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER), @Result(property = "name", column = "name", javaType = String.class, jdbcType = JdbcType.VARCHAR), @Result(property = "desc", column = "desc", javaType = String.class, jdbcType = JdbcType.VARCHAR), + @Result(property = "userName", column = "user_name", javaType = String.class, jdbcType = JdbcType.VARCHAR), @Result(property = "createTime", column = "create_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE), @Result(property = "updateTime", column = "update_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE), }) @@ -82,6 +83,7 @@ public interface ProjectMapper { @Result(property = "userId", column = "user_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER), @Result(property = "name", column = "name", javaType = String.class, jdbcType = JdbcType.VARCHAR), @Result(property = "desc", column = "desc", javaType = String.class, jdbcType = JdbcType.VARCHAR), + @Result(property = "userName", column = "user_name", javaType = String.class, jdbcType = JdbcType.VARCHAR), @Result(property = "createTime", column = "create_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE), @Result(property = "updateTime", column = "update_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE), }) @@ -115,6 +117,8 @@ public interface ProjectMapper { @Result(property = "perm", column = "perm", javaType = Integer.class, jdbcType = JdbcType.INTEGER), @Result(property = "createTime", column = "create_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE), @Result(property = "updateTime", column = "update_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE), + @Result(property = "defCount", column = "def_count", javaType = Integer.class, jdbcType = JdbcType.INTEGER), + @Result(property = "instRunningCount", column = "inst_running_count", javaType = Integer.class, jdbcType = JdbcType.INTEGER), }) @SelectProvider(type = ProjectMapperProvider.class, method = "queryProjectListPaging") List queryProjectListPaging(@Param("userId") Integer userId, @@ -145,6 +149,8 @@ public interface ProjectMapper { 
@Result(property = "perm", column = "perm", javaType = Integer.class, jdbcType = JdbcType.INTEGER), @Result(property = "createTime", column = "create_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE), @Result(property = "updateTime", column = "update_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE), + @Result(property = "defCount", column = "def_count", javaType = Integer.class, jdbcType = JdbcType.INTEGER), + @Result(property = "instRunningCount", column = "inst_running_count", javaType = Integer.class, jdbcType = JdbcType.INTEGER), }) @SelectProvider(type = ProjectMapperProvider.class, method = "queryAllProjectListPaging") List queryAllProjectListPaging( diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProjectMapperProvider.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProjectMapperProvider.java index 6de271e59c..d6a2e4adcf 100644 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProjectMapperProvider.java +++ b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ProjectMapperProvider.java @@ -93,7 +93,7 @@ public class ProjectMapperProvider { public String queryById(Map parameter) { return new SQL() {{ SELECT("p.user_id"); - SELECT("u.user_name as userName"); + SELECT("u.user_name"); SELECT("p.*"); FROM(TABLE_NAME + " p"); @@ -114,7 +114,7 @@ public class ProjectMapperProvider { public String queryByName(Map parameter) { return new SQL() {{ SELECT("p.user_id"); - SELECT("u.user_name as userName"); + SELECT("u.user_name"); SELECT("p.*"); FROM(TABLE_NAME + " p"); @@ -157,7 +157,8 @@ public class ProjectMapperProvider { return new SQL() {{ SELECT("p.*"); SELECT("u.user_name as user_name"); - + SELECT("(SELECT COUNT(*) FROM t_escheduler_process_definition AS def WHERE def.project_id = p.id) AS def_count"); + SELECT("(SELECT COUNT(*) FROM t_escheduler_process_definition def, t_escheduler_process_instance inst WHERE def.id = inst.process_definition_id AND def.project_id = p.id AND inst.state=1 ) as inst_running_count"); FROM(TABLE_NAME + " p"); JOIN("t_escheduler_user u on u.id=p.user_id"); WHERE("p.id in " + @@ -199,7 +200,8 @@ public class ProjectMapperProvider { return new SQL() {{ SELECT("p.*"); SELECT("u.user_name as user_name"); - + SELECT("(SELECT COUNT(*) FROM t_escheduler_process_definition AS def WHERE def.project_id = p.id) AS def_count"); + SELECT("(SELECT COUNT(*) FROM t_escheduler_process_definition def, t_escheduler_process_instance inst WHERE def.id = inst.process_definition_id AND def.project_id = p.id AND inst.state=1 ) as inst_running_count"); FROM(TABLE_NAME + " p"); JOIN("t_escheduler_user u on p.user_id = u.id"); diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ScheduleMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ScheduleMapper.java index 4806979b82..31cc99232a 100644 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ScheduleMapper.java +++ b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ScheduleMapper.java @@ -173,6 +173,30 @@ public interface ScheduleMapper { @SelectProvider(type = ScheduleMapperProvider.class, method = "selectAllByProcessDefineArray") List selectAllByProcessDefineArray(@Param("processDefineIds") int[] processDefineIds); + /** + * query schedule list by definition id + * @param processDefinitionId + * @return + */ + @Results(value = { + @Result(property = "id", column = "id", id = true, javaType = Integer.class, jdbcType = JdbcType.INTEGER), + @Result(property = "processDefinitionId", column = "process_definition_id", id = 
true, javaType = int.class, jdbcType = JdbcType.INTEGER), + @Result(property = "startTime", column = "start_time", javaType = Date.class, jdbcType = JdbcType.TIMESTAMP), + @Result(property = "endTime", column = "end_time", javaType = Date.class, jdbcType = JdbcType.TIMESTAMP), + @Result(property = "crontab", column = "crontab", javaType = String.class, jdbcType = JdbcType.VARCHAR), + @Result(property = "failureStrategy", column = "failure_strategy", typeHandler = EnumOrdinalTypeHandler.class, javaType = FailureStrategy.class, jdbcType = JdbcType.TINYINT), + @Result(property = "warningType", column = "warning_type", typeHandler = EnumOrdinalTypeHandler.class, javaType = WarningType.class, jdbcType = JdbcType.TINYINT), + @Result(property = "createTime", column = "create_time", javaType = Date.class, jdbcType = JdbcType.TIMESTAMP), + @Result(property = "updateTime", column = "update_time", javaType = Date.class, jdbcType = JdbcType.TIMESTAMP), + @Result(property = "userId", column = "user_id", javaType = int.class, jdbcType = JdbcType.INTEGER), + @Result(property = "releaseState", column = "release_state", typeHandler = EnumOrdinalTypeHandler.class, javaType = ReleaseState.class, jdbcType = JdbcType.TINYINT), + @Result(property = "warningGroupId", column = "warning_group_id", javaType = int.class, jdbcType = JdbcType.INTEGER), + @Result(property = "workerGroupId", column = "worker_group_id", javaType = int.class, jdbcType = JdbcType.INTEGER), + @Result(property = "processInstancePriority", column = "process_instance_priority", javaType = Priority.class, typeHandler = EnumOrdinalTypeHandler.class, jdbcType = JdbcType.TINYINT) + }) + @SelectProvider(type = ScheduleMapperProvider.class, method = "queryByProcessDefinitionId") + List queryByProcessDefinitionId(@Param("processDefinitionId") int processDefinitionId); + /** * delete schedule by id * @param scheduleId diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ScheduleMapperProvider.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ScheduleMapperProvider.java index bdfeb409e2..887c3c9117 100644 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ScheduleMapperProvider.java +++ b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/ScheduleMapperProvider.java @@ -181,6 +181,20 @@ public class ScheduleMapperProvider { }}.toString(); } + /** + * query schedule by process definition id + * @param parameter + * @return + */ + public String queryByProcessDefinitionId(Map parameter) { + + return new SQL() {{ + SELECT("*"); + FROM(DB_NAME); + WHERE("process_definition_id = #{processDefinitionId}"); + }}.toString(); + } + /** * delete schedule by id * diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/TenantMapperProvider.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/TenantMapperProvider.java index 8560fc950b..f8151781d4 100644 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/TenantMapperProvider.java +++ b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/TenantMapperProvider.java @@ -150,7 +150,7 @@ public class TenantMapperProvider { public String queryTenantPaging(Map parameter) { return new SQL() { { - SELECT("t.*,q.queue_name as queueName"); + SELECT("t.*,q.queue_name"); FROM(TABLE_NAME +" t,t_escheduler_queue q"); WHERE( " t.queue_id = q.id"); Object searchVal = parameter.get("searchVal"); diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UserMapper.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UserMapper.java index 
9c814543b1..7b5dea5265 100644 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UserMapper.java +++ b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UserMapper.java @@ -175,6 +175,8 @@ public interface UserMapper { @Result(property = "phone", column = "phone", javaType = String.class, jdbcType = JdbcType.VARCHAR), @Result(property = "userType", column = "user_type", typeHandler = EnumOrdinalTypeHandler.class, javaType = UserType.class, jdbcType = JdbcType.TINYINT), @Result(property = "tenantId", column = "tenant_id", javaType = Integer.class, jdbcType = JdbcType.INTEGER), + @Result(property = "tenantName", column = "tenant_name", javaType = String.class, jdbcType = JdbcType.VARCHAR), + @Result(property = "queueName", column = "queue_name", javaType = String.class, jdbcType = JdbcType.VARCHAR), @Result(property = "createTime", column = "create_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE), @Result(property = "updateTime", column = "update_time", javaType = Timestamp.class, jdbcType = JdbcType.DATE) }) diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UserMapperProvider.java b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UserMapperProvider.java index 2fe0af9f20..f70ba8250f 100644 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UserMapperProvider.java +++ b/escheduler-dao/src/main/java/cn/escheduler/dao/mapper/UserMapperProvider.java @@ -203,7 +203,7 @@ public class UserMapperProvider { public String queryUserPaging(Map parameter) { return new SQL() { { - SELECT("u.*,t.tenant_name as tenantName,q.queue_name as queueName"); + SELECT("u.*,t.tenant_name,q.queue_name"); FROM(TABLE_NAME + " u "); LEFT_OUTER_JOIN("t_escheduler_tenant t on u.tenant_id = t.id"); LEFT_OUTER_JOIN("t_escheduler_queue q on t.queue_id = q.id"); @@ -228,7 +228,7 @@ public class UserMapperProvider { public String queryDetailsById(Map parameter) { return new SQL() { { - SELECT("u.*,q.queue_name as queueName,t.tenant_name as tenantName"); + SELECT("u.*,q.queue_name,t.tenant_name"); FROM(TABLE_NAME + " u,t_escheduler_tenant t,t_escheduler_queue q"); @@ -262,7 +262,7 @@ public class UserMapperProvider { public String queryTenantCodeByUserId(Map parameter) { return new SQL() { { - SELECT("u.*,t.tenant_code as tenantCode"); + SELECT("u.*,t.tenant_code"); FROM(TABLE_NAME + " u,t_escheduler_tenant t"); WHERE("u.tenant_id = t.id AND u.id = #{userId}"); } diff --git a/escheduler-dao/src/main/java/cn/escheduler/dao/model/Project.java b/escheduler-dao/src/main/java/cn/escheduler/dao/model/Project.java index 6d0a9bbdb7..e835c9b97b 100644 --- a/escheduler-dao/src/main/java/cn/escheduler/dao/model/Project.java +++ b/escheduler-dao/src/main/java/cn/escheduler/dao/model/Project.java @@ -63,6 +63,32 @@ public class Project { */ private int perm; + /** + * process define count + */ + private int defCount; + + /** + * process instance running count + */ + private int instRunningCount; + + public int getDefCount() { + return defCount; + } + + public void setDefCount(int defCount) { + this.defCount = defCount; + } + + public int getInstRunningCount() { + return instRunningCount; + } + + public void setInstRunningCount(int instRunningCount) { + this.instRunningCount = instRunningCount; + } + public int getId() { return id; } diff --git a/escheduler-server/src/main/java/cn/escheduler/server/utils/AlertManager.java b/escheduler-server/src/main/java/cn/escheduler/server/utils/AlertManager.java index fc62bcf73d..147f538bca 100644 --- 
a/escheduler-server/src/main/java/cn/escheduler/server/utils/AlertManager.java +++ b/escheduler-server/src/main/java/cn/escheduler/server/utils/AlertManager.java @@ -175,7 +175,7 @@ public class AlertManager { alert.setContent(content); alert.setAlertType(AlertType.EMAIL); alert.setCreateTime(new Date()); - alert.setAlertGroupId(processInstance.getWarningGroupId()); + alert.setAlertGroupId(processInstance.getWarningGroupId() == null ? 1:processInstance.getWarningGroupId()); alert.setReceivers(processInstance.getProcessDefinition().getReceivers()); alert.setReceiversCc(processInstance.getProcessDefinition().getReceiversCc()); diff --git a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/sql/SqlTask.java b/escheduler-server/src/main/java/cn/escheduler/server/worker/task/sql/SqlTask.java index f984549e72..89822e710c 100644 --- a/escheduler-server/src/main/java/cn/escheduler/server/worker/task/sql/SqlTask.java +++ b/escheduler-server/src/main/java/cn/escheduler/server/worker/task/sql/SqlTask.java @@ -279,7 +279,11 @@ public class SqlTask extends AbstractTask { logger.info("showType is empty,don't need send email"); } else { if (array.size() > 0) { - sendAttachment(taskProps.getNodeName() + " query resultsets ", JSONObject.toJSONString(array, SerializerFeature.WriteMapNullValue)); + if (StringUtils.isNotEmpty(sqlParameters.getTitle())) { + sendAttachment(sqlParameters.getTitle(), JSONObject.toJSONString(array, SerializerFeature.WriteMapNullValue)); + }else{ + sendAttachment(taskProps.getNodeName() + " query resultsets ", JSONObject.toJSONString(array, SerializerFeature.WriteMapNullValue)); + } } } diff --git a/escheduler-server/src/main/java/cn/escheduler/server/zk/ZKMasterClient.java b/escheduler-server/src/main/java/cn/escheduler/server/zk/ZKMasterClient.java index 624d0193be..fe3360484c 100644 --- a/escheduler-server/src/main/java/cn/escheduler/server/zk/ZKMasterClient.java +++ b/escheduler-server/src/main/java/cn/escheduler/server/zk/ZKMasterClient.java @@ -123,9 +123,9 @@ public class ZKMasterClient extends AbstractZKClient { // register master this.registMaster(); - // check if fault tolerance is required + // check if fault tolerance is required,failure and tolerance if (getActiveMasterNum() == 1) { - processDao.selfFaultTolerant(ExecutionStatus.RUNNING_EXEUTION.ordinal()); + processDao.selfFaultTolerant(ExecutionStatus.RUNNING_EXEUTION.ordinal(),ExecutionStatus.NEED_FAULT_TOLERANCE.ordinal()); } } diff --git a/escheduler-ui/.env b/escheduler-ui/.env index 5cd5ba736d..fc1b619f69 100644 --- a/escheduler-ui/.env +++ b/escheduler-ui/.env @@ -1,5 +1,5 @@ -# 后端接口地址 +# 后端接口地址11 API_BASE = http://192.168.xx.xx:12345 # 本地开发如需ip访问项目把"#"号去掉 diff --git a/escheduler-ui/src/js/conf/home/pages/projects/pages/list/_source/list.vue b/escheduler-ui/src/js/conf/home/pages/projects/pages/list/_source/list.vue index 890c25b80b..087b50032a 100644 --- a/escheduler-ui/src/js/conf/home/pages/projects/pages/list/_source/list.vue +++ b/escheduler-ui/src/js/conf/home/pages/projects/pages/list/_source/list.vue @@ -12,6 +12,12 @@ {{$t('Owned Users')}} + + {{$t('Process Define Count')}} + + + {{$t('Process Instance Running Count')}} + {{$t('Description')}} @@ -37,6 +43,12 @@ {{item.userName || '-'}} + + {{item.defCount}} + + + {{item.instRunningCount}} + {{item.desc}} @@ -150,4 +162,4 @@ }, components: { } } - \ No newline at end of file + diff --git a/escheduler-ui/src/js/conf/home/store/dag/actions.js b/escheduler-ui/src/js/conf/home/store/dag/actions.js index 0872249d9c..929c4f6df5 100644 --- 
a/escheduler-ui/src/js/conf/home/store/dag/actions.js +++ b/escheduler-ui/src/js/conf/home/store/dag/actions.js @@ -538,7 +538,7 @@ export default { */ getReceiver ({ state }, payload) { return new Promise((resolve, reject) => { - io.get(`projects/{projectName}/executors/get-receiver-cc`, payload, res => { + io.get(`projects/${state.projectName}/executors/get-receiver-cc`, payload, res => { resolve(res.data) }).catch(e => { reject(e) @@ -547,7 +547,7 @@ export default { }, getTaskListDefIdAll ({ state }, payload) { return new Promise((resolve, reject) => { - io.get(`projects/{projectName}/process/get-task-list`, payload, res => { + io.get(`projects/${state.projectName}/process/get-task-list`, payload, res => { resolve(res.data) }).catch(e => { reject(e) diff --git a/escheduler-ui/src/js/module/i18n/locale/en_US.js b/escheduler-ui/src/js/module/i18n/locale/en_US.js index a31e2b5e8d..0a3ba357f4 100644 --- a/escheduler-ui/src/js/module/i18n/locale/en_US.js +++ b/escheduler-ui/src/js/module/i18n/locale/en_US.js @@ -452,4 +452,6 @@ export default { 'Pre Statement': 'Pre Statement', 'Post Statement': 'Post Statement', 'Statement cannot be empty': 'Statement cannot be empty', + 'Process Define Count':'Process Define Count', + 'Process Instance Running Count':'Process Instance Running Count', } diff --git a/escheduler-ui/src/js/module/i18n/locale/zh_CN.js b/escheduler-ui/src/js/module/i18n/locale/zh_CN.js index 87aa0e44e8..e47aa84ec4 100644 --- a/escheduler-ui/src/js/module/i18n/locale/zh_CN.js +++ b/escheduler-ui/src/js/module/i18n/locale/zh_CN.js @@ -452,4 +452,6 @@ export default { 'Pre Statement': '前置sql', 'Post Statement': '后置sql', 'Statement cannot be empty': '语句不能为空', + 'Process Define Count':'流程定义个数', + 'Process Instance Running Count':'运行流程实例个数', }