
[Improvement-3369][api] Introduce data analysis service interface for clear code (#3439)

* [Improvement][api] Introduce data analysis service interface for clear code

* Remove the bad smell

* Remove the bad smell
Authored by Yichao Yang, committed via GitHub · commit 3ed0afa6ac
  1. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java (317 lines changed)
  2. dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java (384 lines changed)
  3. dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java (33 lines changed)
  4. dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/TriFunction.java (27 lines changed)
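For orientation, the change replaces the single concrete DataAnalysisService class with an interface plus a Spring @Service implementation, so callers depend only on the interface. A minimal sketch of that shape, assuming nothing beyond plain Spring (the Greeting* names are illustrative and not part of this commit):

import org.springframework.stereotype.Service;

// Callers program against the interface only.
public interface GreetingService {
    String greet(String name);
}

// Spring-managed bean injected wherever a GreetingService is required.
@Service
class GreetingServiceImpl implements GreetingService {
    @Override
    public String greet(String name) {
        return "hello, " + name;
    }
}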

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/DataAnalysisService.java (317 lines changed)

@@ -17,57 +17,14 @@
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.dto.CommandStateCount;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.api.dto.DefineUserDto;
import org.apache.dolphinscheduler.api.dto.TaskCountDto;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.*;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.service.process.ProcessService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.text.MessageFormat;
import java.util.Map;
import java.util.*;
/**
 * data analysis service
 */
@Service
public interface DataAnalysisService {
public class DataAnalysisService extends BaseService{
private static final Logger logger = LoggerFactory.getLogger(DataAnalysisService.class);
@Autowired
ProjectMapper projectMapper;
@Autowired
ProjectService projectService;
@Autowired
ProcessInstanceMapper processInstanceMapper;
@Autowired
ProcessDefinitionMapper processDefinitionMapper;
@Autowired
CommandMapper commandMapper;
@Autowired
ErrorCommandMapper errorCommandMapper;
@Autowired
TaskInstanceMapper taskInstanceMapper;
@Autowired
ProcessService processService;
/**
 * statistical task instance status data
@@ -78,46 +35,7 @@ public class DataAnalysisService extends BaseService{
 * @param endDate end date
 * @return task state count data
 */
public Map<String,Object> countTaskStateByProject(User loginUser, int projectId, String startDate, String endDate) {
Map<String, Object> countTaskStateByProject(User loginUser, int projectId, String startDate, String endDate);
Map<String, Object> result = new HashMap<>(5);
boolean checkProject = checkProject(loginUser, projectId, result);
if(!checkProject){
return result;
}
/**
* find all the task lists in the project under the user
* statistics based on task status execution, failure, completion, wait, total
*/
Date start = null;
Date end = null;
try {
start = DateUtils.getScheduleDate(startDate);
end = DateUtils.getScheduleDate(endDate);
} catch (Exception e) {
logger.error(e.getMessage(),e);
putErrorRequestParamsMsg(result);
return result;
}
Integer[] projectIds = getProjectIdsArrays(loginUser, projectId);
List<ExecuteStatusCount> taskInstanceStateCounts =
taskInstanceMapper.countTaskInstanceStateByUser(start, end, projectIds);
if (taskInstanceStateCounts != null) {
TaskCountDto taskCountResult = new TaskCountDto(taskInstanceStateCounts);
result.put(Constants.DATA_LIST, taskCountResult);
putMsg(result, Status.SUCCESS);
}
return result;
}
private void putErrorRequestParamsMsg(Map<String, Object> result) {
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), "startDate,endDate"));
}
/**
 * statistical process instance status data
@@ -128,37 +46,7 @@ public class DataAnalysisService extends BaseService{
 * @param endDate end date
 * @return process instance state count data
 */
public Map<String,Object> countProcessInstanceStateByProject(User loginUser, int projectId, String startDate, String endDate) {
Map<String, Object> countProcessInstanceStateByProject(User loginUser, int projectId, String startDate, String endDate);
Map<String, Object> result = new HashMap<>(5);
boolean checkProject = checkProject(loginUser, projectId, result);
if(!checkProject){
return result;
}
Date start = null;
Date end = null;
try {
start = DateUtils.getScheduleDate(startDate);
end = DateUtils.getScheduleDate(endDate);
} catch (Exception e) {
logger.error(e.getMessage(),e);
putErrorRequestParamsMsg(result);
return result;
}
Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId);
List<ExecuteStatusCount> processInstanceStateCounts =
processInstanceMapper.countInstanceStateByUser(start, end,
projectIdArray);
if (processInstanceStateCounts != null) {
TaskCountDto taskCountResult = new TaskCountDto(processInstanceStateCounts);
result.put(Constants.DATA_LIST, taskCountResult);
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
 * statistics the process definition quantities of certain person
@@ -167,20 +55,7 @@ public class DataAnalysisService extends BaseService{
 * @param projectId project id
 * @return definition count data
 */
public Map<String,Object> countDefinitionByUser(User loginUser, int projectId) {
Map<String, Object> countDefinitionByUser(User loginUser, int projectId);
Map<String, Object> result = new HashMap<>();
Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId);
List<DefinitionGroupByUser> defineGroupByUsers = processDefinitionMapper.countDefinitionGroupByUser(
loginUser.getId(), projectIdArray,isAdmin(loginUser));
DefineUserDto dto = new DefineUserDto(defineGroupByUsers);
result.put(Constants.DATA_LIST, dto);
putMsg(result, Status.SUCCESS);
return result;
}
/**
 * statistical command status data
@@ -191,189 +66,15 @@ public class DataAnalysisService extends BaseService{
 * @param endDate end date
 * @return command state count data
 */
public Map<String, Object> countCommandState(User loginUser, int projectId, String startDate, String endDate) {
Map<String, Object> countCommandState(User loginUser, int projectId, String startDate, String endDate);
Map<String, Object> result = new HashMap<>(5);
boolean checkProject = checkProject(loginUser, projectId, result);
if(!checkProject){
return result;
}
/**
* find all the task lists in the project under the user
* statistics based on task status execution, failure, completion, wait, total
*/
Date start = null;
Date end = null;
if (startDate != null && endDate != null){
try {
start = DateUtils.getScheduleDate(startDate);
end = DateUtils.getScheduleDate(endDate);
} catch (Exception e) {
logger.error(e.getMessage(),e);
putErrorRequestParamsMsg(result);
return result;
}
}
Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId);
// count command state
List<CommandCount> commandStateCounts =
commandMapper.countCommandState(
loginUser.getId(),
start,
end,
projectIdArray);
// count error command state
List<CommandCount> errorCommandStateCounts =
errorCommandMapper.countCommandState(
start, end, projectIdArray);
//
Map<CommandType,Map<String,Integer>> dataMap = new HashMap<>();
Map<String,Integer> commonCommand = new HashMap<>();
commonCommand.put("commandState",0);
commonCommand.put("errorCommandState",0);
// init data map
/**
* START_PROCESS, START_CURRENT_TASK_PROCESS, RECOVER_TOLERANCE_FAULT_PROCESS, RECOVER_SUSPENDED_PROCESS,
START_FAILURE_TASK_PROCESS,COMPLEMENT_DATA,SCHEDULER, REPEAT_RUNNING,PAUSE,STOP,RECOVER_WAITTING_THREAD;
*/
dataMap.put(CommandType.START_PROCESS,commonCommand);
dataMap.put(CommandType.START_CURRENT_TASK_PROCESS,commonCommand);
dataMap.put(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS,commonCommand);
dataMap.put(CommandType.RECOVER_SUSPENDED_PROCESS,commonCommand);
dataMap.put(CommandType.START_FAILURE_TASK_PROCESS,commonCommand);
dataMap.put(CommandType.COMPLEMENT_DATA,commonCommand);
dataMap.put(CommandType.SCHEDULER,commonCommand);
dataMap.put(CommandType.REPEAT_RUNNING,commonCommand);
dataMap.put(CommandType.PAUSE,commonCommand);
dataMap.put(CommandType.STOP,commonCommand);
dataMap.put(CommandType.RECOVER_WAITTING_THREAD,commonCommand);
// put command state
for (CommandCount executeStatusCount : commandStateCounts){
Map<String,Integer> commandStateCountsMap = new HashMap<>(dataMap.get(executeStatusCount.getCommandType()));
commandStateCountsMap.put("commandState", executeStatusCount.getCount());
dataMap.put(executeStatusCount.getCommandType(),commandStateCountsMap);
}
// put error command state
for (CommandCount errorExecutionStatus : errorCommandStateCounts){
Map<String,Integer> errorCommandStateCountsMap = new HashMap<>(dataMap.get(errorExecutionStatus.getCommandType()));
errorCommandStateCountsMap.put("errorCommandState",errorExecutionStatus.getCount());
dataMap.put(errorExecutionStatus.getCommandType(),errorCommandStateCountsMap);
}
List<CommandStateCount> list = new ArrayList<>();
Iterator<Map.Entry<CommandType, Map<String, Integer>>> iterator = dataMap.entrySet().iterator();
while (iterator.hasNext()){
Map.Entry<CommandType, Map<String, Integer>> next = iterator.next();
CommandStateCount commandStateCount = new CommandStateCount(next.getValue().get("errorCommandState"),
next.getValue().get("commandState"),next.getKey());
list.add(commandStateCount);
}
result.put(Constants.DATA_LIST, list);
putMsg(result, Status.SUCCESS);
return result;
}
private Integer[] getProjectIdsArrays(User loginUser, int projectId) {
List<Integer> projectIds = new ArrayList<>();
if(projectId !=0){
projectIds.add(projectId);
}else if(loginUser.getUserType() == UserType.GENERAL_USER){
projectIds = processService.getProjectIdListHavePerm(loginUser.getId());
if(projectIds.size() ==0 ){
projectIds.add(0);
}
}
return projectIds.toArray(new Integer[projectIds.size()]);
}
/**
 * count queue state
 *
 * @param loginUser login user
 * @param projectId project id
 * @return queue state count data
 */
public Map<String, Object> countQueueState(User loginUser, int projectId) {
Map<String, Object> countQueueState(User loginUser, int projectId);
Map<String, Object> result = new HashMap<>(5);
boolean checkProject = checkProject(loginUser, projectId, result);
if(!checkProject){
return result;
}
List<String> tasksQueueList = new ArrayList<>();
List<String> tasksKillList = new ArrayList<>();
Map<String,Integer> dataMap = new HashMap<>();
if (loginUser.getUserType() == UserType.ADMIN_USER){
dataMap.put("taskQueue",tasksQueueList.size());
dataMap.put("taskKill",tasksKillList.size());
result.put(Constants.DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
int[] tasksQueueIds = new int[tasksQueueList.size()];
int[] tasksKillIds = new int[tasksKillList.size()];
int i =0;
for (String taskQueueStr : tasksQueueList){
if (StringUtils.isNotEmpty(taskQueueStr)){
String[] splits = taskQueueStr.split("_");
if (splits.length >= 4){
tasksQueueIds[i++] = Integer.parseInt(splits[3]);
}
}
}
i = 0;
for (String taskKillStr : tasksKillList){
if (StringUtils.isNotEmpty(taskKillStr)){
String[] splits = taskKillStr.split("-");
if (splits.length == 2){
tasksKillIds[i++] = Integer.parseInt(splits[1]);
}
}
}
Integer taskQueueCount = 0;
Integer taskKillCount = 0;
Integer[] projectIds = getProjectIdsArrays(loginUser, projectId);
if (tasksQueueIds.length != 0){
taskQueueCount = taskInstanceMapper.countTask(
projectIds,
tasksQueueIds);
}
if (tasksKillIds.length != 0){
taskKillCount = taskInstanceMapper.countTask(projectIds, tasksKillIds);
}
dataMap.put("taskQueue",taskQueueCount);
dataMap.put("taskKill",taskKillCount);
result.put(Constants.DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
private boolean checkProject(User loginUser, int projectId, Map<String, Object> result){
if(projectId != 0){
Project project = projectMapper.selectById(projectId);
return projectService.hasProjectAndPerm(loginUser, project, result);
}
return true;
}
}
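With the interface extracted, API-layer callers can be wired against DataAnalysisService and let Spring supply DataAnalysisServiceImpl as the backing bean. A minimal sketch of such a caller, assuming standard @Autowired injection (the AnalysisFacade class below is illustrative, not taken from this commit):

import org.apache.dolphinscheduler.api.service.DataAnalysisService;
import org.apache.dolphinscheduler.dao.entity.User;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.util.Map;

// Illustrative caller: it depends only on the DataAnalysisService interface;
// Spring injects DataAnalysisServiceImpl because that is the @Service bean implementing it.
@Component
public class AnalysisFacade {

    @Autowired
    private DataAnalysisService dataAnalysisService;

    public Map<String, Object> taskStateCounts(User loginUser, int projectId, String start, String end) {
        return dataAnalysisService.countTaskStateByProject(loginUser, projectId, start, end);
    }
}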

dolphinscheduler-api/src/main/java/org/apache/dolphinscheduler/api/service/impl/DataAnalysisServiceImpl.java (384 lines changed)

@@ -0,0 +1,384 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.api.service.impl;
import org.apache.dolphinscheduler.api.dto.CommandStateCount;
import org.apache.dolphinscheduler.api.dto.DefineUserDto;
import org.apache.dolphinscheduler.api.dto.TaskCountDto;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.service.DataAnalysisService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.common.utils.TriFunction;
import org.apache.dolphinscheduler.dao.entity.CommandCount;
import org.apache.dolphinscheduler.dao.entity.DefinitionGroupByUser;
import org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.CommandMapper;
import org.apache.dolphinscheduler.dao.mapper.ErrorCommandMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
* data analysis service impl
*/
@Service
public class DataAnalysisServiceImpl extends BaseService implements DataAnalysisService {
private static final Logger logger = LoggerFactory.getLogger(DataAnalysisServiceImpl.class);
@Autowired
private ProjectMapper projectMapper;
@Autowired
private ProjectService projectService;
@Autowired
private ProcessInstanceMapper processInstanceMapper;
@Autowired
private ProcessDefinitionMapper processDefinitionMapper;
@Autowired
private CommandMapper commandMapper;
@Autowired
private ErrorCommandMapper errorCommandMapper;
@Autowired
private TaskInstanceMapper taskInstanceMapper;
@Autowired
private ProcessService processService;
private static final String COMMAND_STATE = "commandState";
private static final String ERROR_COMMAND_STATE = "errorCommandState";
/**
* statistical task instance status data
*
* @param loginUser login user
* @param projectId project id
* @param startDate start date
* @param endDate end date
* @return task state count data
*/
public Map<String, Object> countTaskStateByProject(User loginUser, int projectId, String startDate, String endDate) {
return countStateByProject(
loginUser,
projectId,
startDate,
endDate,
(start, end, projectIds) -> this.taskInstanceMapper.countTaskInstanceStateByUser(start, end, projectIds));
}
/**
* statistical process instance status data
*
* @param loginUser login user
* @param projectId project id
* @param startDate start date
* @param endDate end date
* @return process instance state count data
*/
public Map<String, Object> countProcessInstanceStateByProject(User loginUser, int projectId, String startDate, String endDate) {
return this.countStateByProject(
loginUser,
projectId,
startDate,
endDate,
(start, end, projectIds) -> this.processInstanceMapper.countInstanceStateByUser(start, end, projectIds));
}
private Map<String, Object> countStateByProject(User loginUser, int projectId, String startDate, String endDate
, TriFunction<Date, Date, Integer[], List<ExecuteStatusCount>> instanceStateCounter) {
Map<String, Object> result = new HashMap<>(5);
boolean checkProject = checkProject(loginUser, projectId, result);
if (!checkProject) {
return result;
}
Date start;
Date end;
try {
start = DateUtils.getScheduleDate(startDate);
end = DateUtils.getScheduleDate(endDate);
} catch (Exception e) {
logger.error(e.getMessage(), e);
putErrorRequestParamsMsg(result);
return result;
}
Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId);
List<ExecuteStatusCount> processInstanceStateCounts =
instanceStateCounter.apply(start, end, projectIdArray);
if (processInstanceStateCounts != null) {
TaskCountDto taskCountResult = new TaskCountDto(processInstanceStateCounts);
result.put(Constants.DATA_LIST, taskCountResult);
putMsg(result, Status.SUCCESS);
}
return result;
}
/**
* statistics the process definition quantities of certain person
*
* @param loginUser login user
* @param projectId project id
* @return definition count data
*/
public Map<String, Object> countDefinitionByUser(User loginUser, int projectId) {
Map<String, Object> result = new HashMap<>();
Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId);
List<DefinitionGroupByUser> defineGroupByUsers = processDefinitionMapper.countDefinitionGroupByUser(
loginUser.getId(), projectIdArray, isAdmin(loginUser));
DefineUserDto dto = new DefineUserDto(defineGroupByUsers);
result.put(Constants.DATA_LIST, dto);
putMsg(result, Status.SUCCESS);
return result;
}
/**
* statistical command status data
*
* @param loginUser login user
* @param projectId project id
* @param startDate start date
* @param endDate end date
* @return command state count data
*/
public Map<String, Object> countCommandState(User loginUser, int projectId, String startDate, String endDate) {
Map<String, Object> result = new HashMap<>(5);
boolean checkProject = checkProject(loginUser, projectId, result);
if (!checkProject) {
return result;
}
/**
* find all the task lists in the project under the user
* statistics based on task status execution, failure, completion, wait, total
*/
Date start = null;
Date end = null;
if (startDate != null && endDate != null) {
try {
start = DateUtils.getScheduleDate(startDate);
end = DateUtils.getScheduleDate(endDate);
} catch (Exception e) {
logger.error(e.getMessage(), e);
putErrorRequestParamsMsg(result);
return result;
}
}
Integer[] projectIdArray = getProjectIdsArrays(loginUser, projectId);
// count command state
List<CommandCount> commandStateCounts =
commandMapper.countCommandState(
loginUser.getId(),
start,
end,
projectIdArray);
// count error command state
List<CommandCount> errorCommandStateCounts =
errorCommandMapper.countCommandState(
start, end, projectIdArray);
// enumMap
Map<CommandType, Map<String, Integer>> dataMap = new EnumMap<>(CommandType.class);
Map<String, Integer> commonCommand = new HashMap<>();
commonCommand.put(COMMAND_STATE, 0);
commonCommand.put(ERROR_COMMAND_STATE, 0);
// init data map
/**
* START_PROCESS, START_CURRENT_TASK_PROCESS, RECOVER_TOLERANCE_FAULT_PROCESS, RECOVER_SUSPENDED_PROCESS,
START_FAILURE_TASK_PROCESS,COMPLEMENT_DATA,SCHEDULER, REPEAT_RUNNING,PAUSE,STOP,RECOVER_WAITTING_THREAD;
*/
dataMap.put(CommandType.START_PROCESS, commonCommand);
dataMap.put(CommandType.START_CURRENT_TASK_PROCESS, commonCommand);
dataMap.put(CommandType.RECOVER_TOLERANCE_FAULT_PROCESS, commonCommand);
dataMap.put(CommandType.RECOVER_SUSPENDED_PROCESS, commonCommand);
dataMap.put(CommandType.START_FAILURE_TASK_PROCESS, commonCommand);
dataMap.put(CommandType.COMPLEMENT_DATA, commonCommand);
dataMap.put(CommandType.SCHEDULER, commonCommand);
dataMap.put(CommandType.REPEAT_RUNNING, commonCommand);
dataMap.put(CommandType.PAUSE, commonCommand);
dataMap.put(CommandType.STOP, commonCommand);
dataMap.put(CommandType.RECOVER_WAITTING_THREAD, commonCommand);
// put command state
for (CommandCount executeStatusCount : commandStateCounts) {
Map<String, Integer> commandStateCountsMap = new HashMap<>(dataMap.get(executeStatusCount.getCommandType()));
commandStateCountsMap.put(COMMAND_STATE, executeStatusCount.getCount());
dataMap.put(executeStatusCount.getCommandType(), commandStateCountsMap);
}
// put error command state
for (CommandCount errorExecutionStatus : errorCommandStateCounts) {
Map<String, Integer> errorCommandStateCountsMap = new HashMap<>(dataMap.get(errorExecutionStatus.getCommandType()));
errorCommandStateCountsMap.put(ERROR_COMMAND_STATE, errorExecutionStatus.getCount());
dataMap.put(errorExecutionStatus.getCommandType(), errorCommandStateCountsMap);
}
List<CommandStateCount> list = new ArrayList<>();
for (Map.Entry<CommandType, Map<String, Integer>> next : dataMap.entrySet()) {
CommandStateCount commandStateCount = new CommandStateCount(next.getValue().get(ERROR_COMMAND_STATE),
next.getValue().get(COMMAND_STATE), next.getKey());
list.add(commandStateCount);
}
result.put(Constants.DATA_LIST, list);
putMsg(result, Status.SUCCESS);
return result;
}
private Integer[] getProjectIdsArrays(User loginUser, int projectId) {
List<Integer> projectIds = new ArrayList<>();
if (projectId != 0) {
projectIds.add(projectId);
} else if (loginUser.getUserType() == UserType.GENERAL_USER) {
projectIds = processService.getProjectIdListHavePerm(loginUser.getId());
if (projectIds.isEmpty()) {
projectIds.add(0);
}
}
return projectIds.toArray(new Integer[0]);
}
/**
* count queue state
*
* @param loginUser login user
* @param projectId project id
* @return queue state count data
*/
public Map<String, Object> countQueueState(User loginUser, int projectId) {
Map<String, Object> result = new HashMap<>(5);
boolean checkProject = checkProject(loginUser, projectId, result);
if (!checkProject) {
return result;
}
// TODO tasksQueueList and tasksKillList is never updated.
List<String> tasksQueueList = new ArrayList<>();
List<String> tasksKillList = new ArrayList<>();
Map<String, Integer> dataMap = new HashMap<>();
if (loginUser.getUserType() == UserType.ADMIN_USER) {
dataMap.put("taskQueue", tasksQueueList.size());
dataMap.put("taskKill", tasksKillList.size());
result.put(Constants.DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
int[] tasksQueueIds = new int[tasksQueueList.size()];
int[] tasksKillIds = new int[tasksKillList.size()];
int i = 0;
for (String taskQueueStr : tasksQueueList) {
if (StringUtils.isNotEmpty(taskQueueStr)) {
String[] splits = taskQueueStr.split("_");
if (splits.length >= 4) {
tasksQueueIds[i++] = Integer.parseInt(splits[3]);
}
}
}
i = 0;
for (String taskKillStr : tasksKillList) {
if (StringUtils.isNotEmpty(taskKillStr)) {
String[] splits = taskKillStr.split("-");
if (splits.length == 2) {
tasksKillIds[i++] = Integer.parseInt(splits[1]);
}
}
}
Integer taskQueueCount = 0;
Integer taskKillCount = 0;
Integer[] projectIds = getProjectIdsArrays(loginUser, projectId);
if (tasksQueueIds.length != 0) {
taskQueueCount = taskInstanceMapper.countTask(
projectIds,
tasksQueueIds);
}
if (tasksKillIds.length != 0) {
taskKillCount = taskInstanceMapper.countTask(projectIds, tasksKillIds);
}
dataMap.put("taskQueue", taskQueueCount);
dataMap.put("taskKill", taskKillCount);
result.put(Constants.DATA_LIST, dataMap);
putMsg(result, Status.SUCCESS);
return result;
}
private boolean checkProject(User loginUser, int projectId, Map<String, Object> result) {
if (projectId != 0) {
Project project = projectMapper.selectById(projectId);
return projectService.hasProjectAndPerm(loginUser, project, result);
}
return true;
}
private void putErrorRequestParamsMsg(Map<String, Object> result) {
result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), "startDate,endDate"));
}
}
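The main structural change inside the implementation is the shared countStateByProject template: countTaskStateByProject and countProcessInstanceStateByProject now differ only in the mapper query they pass in as a TriFunction lambda. A stripped-down sketch of that shape, with placeholder data in place of real mapper calls (StateCountSketch and its method names are illustrative):

import org.apache.dolphinscheduler.common.utils.TriFunction;

import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class StateCountSketch {

    // Shared template: permission checks, date parsing and result assembly are written once here.
    private Map<String, Object> countStateByProject(Date start, Date end, Integer[] projectIds,
            TriFunction<Date, Date, Integer[], List<String>> counter) {
        Map<String, Object> result = new HashMap<>();
        result.put("data", counter.apply(start, end, projectIds));
        return result;
    }

    // Each public method only supplies its own query as a lambda.
    public Map<String, Object> countTaskState(Date start, Date end, Integer[] projectIds) {
        return countStateByProject(start, end, projectIds,
                (s, e, ids) -> Arrays.asList("task state counts for " + ids.length + " projects"));
    }

    public Map<String, Object> countProcessInstanceState(Date start, Date end, Integer[] projectIds) {
        return countStateByProject(start, end, projectIds,
                (s, e, ids) -> Arrays.asList("process instance state counts for " + ids.length + " projects"));
    }
}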

dolphinscheduler-api/src/test/java/org/apache/dolphinscheduler/api/service/DataAnalysisServiceTest.java (33 lines changed)

@@ -17,17 +17,28 @@
package org.apache.dolphinscheduler.api.service;
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.impl.DataAnalysisServiceImpl;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.CommandType;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.dao.entity.CommandCount;
import org.apache.dolphinscheduler.dao.entity.ExecuteStatusCount;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.*;
import org.apache.dolphinscheduler.dao.mapper.CommandMapper;
import org.apache.dolphinscheduler.dao.mapper.ErrorCommandMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
@@ -36,19 +47,13 @@ import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@RunWith(PowerMockRunner.class)
public class DataAnalysisServiceTest {
@InjectMocks
private DataAnalysisService dataAnalysisService;
private DataAnalysisServiceImpl dataAnalysisService;
@Mock
ProjectMapper projectMapper;
@@ -71,13 +76,9 @@ public class DataAnalysisServiceTest {
@Mock
TaskInstanceMapper taskInstanceMapper;
@Mock
ProcessService processService;
private Project project;
private Map<String, Object> resultMap;
private User user;
@@ -86,7 +87,7 @@ public class DataAnalysisServiceTest {
public void setUp() {
user = new User();
project = new Project();
Project project = new Project();
project.setId(1);
resultMap = new HashMap<>();
Mockito.when(projectMapper.selectById(1)).thenReturn(project);
@@ -103,7 +104,6 @@
resultMap = null;
}
@Test
public void testCountTaskStateByProject() {
@@ -124,7 +124,6 @@
}
@Test
public void testCountProcessInstanceStateByProject() {
@@ -148,7 +147,6 @@
Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
}
@Test
public void testCountCommandState() {
@@ -174,7 +172,6 @@
/**
 * get list
* @return
 */
private List<ExecuteStatusCount> getTaskInstanceStateCounts() {
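Because the test now injects the concrete DataAnalysisServiceImpl, only the mappers a given call touches need stubbing. A minimal sketch of that wiring (the class name, stubbed values and date strings are illustrative, not copied from the real test; an empty count list is assumed to be enough for the service to report SUCCESS):

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.service.impl.DataAnalysisServiceImpl;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;

import java.util.ArrayList;
import java.util.Map;

import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.powermock.modules.junit4.PowerMockRunner;

// Illustrative sketch only; the real assertions live in DataAnalysisServiceTest above.
@RunWith(PowerMockRunner.class)
public class DataAnalysisServiceSketchTest {

    @InjectMocks
    private DataAnalysisServiceImpl dataAnalysisService;

    @Mock
    private ProjectMapper projectMapper;

    @Mock
    private ProjectService projectService;

    @Mock
    private TaskInstanceMapper taskInstanceMapper;

    @Test
    public void countTaskStateByProjectReturnsSuccess() {
        User user = new User();
        Project project = new Project();
        project.setId(1);

        Mockito.when(projectMapper.selectById(1)).thenReturn(project);
        Mockito.when(projectService.hasProjectAndPerm(Mockito.any(), Mockito.any(), Mockito.any()))
                .thenReturn(true);
        Mockito.when(taskInstanceMapper.countTaskInstanceStateByUser(Mockito.any(), Mockito.any(), Mockito.any()))
                .thenReturn(new ArrayList<>());

        Map<String, Object> result = dataAnalysisService.countTaskStateByProject(
                user, 1, "2020-02-11 16:02:18", "2020-02-11 16:03:18");

        Assert.assertEquals(Status.SUCCESS, result.get(Constants.STATUS));
    }
}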

dolphinscheduler-common/src/main/java/org/apache/dolphinscheduler/common/utils/TriFunction.java (27 lines changed)

@@ -0,0 +1,27 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dolphinscheduler.common.utils;
/**
* tri function function interface
*/
@FunctionalInterface
public interface TriFunction<IN1, IN2, IN3, OUT1> {
OUT1 apply(IN1 in1, IN2 in2, IN3 in3);
}
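TriFunction mirrors java.util.function.BiFunction but takes three arguments, which is exactly what the countStateByProject lambdas above need (start date, end date, project id array). A small usage example (the variable names are illustrative):

import org.apache.dolphinscheduler.common.utils.TriFunction;

public class TriFunctionExample {
    public static void main(String[] args) {
        // Combine three inputs into one output via a lambda.
        TriFunction<Integer, Integer, Integer, Integer> sum = (a, b, c) -> a + b + c;
        System.out.println(sum.apply(1, 2, 3)); // prints 6
    }
}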