lenboo
4 years ago
151 changed files with 8048 additions and 5895 deletions
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -0,0 +1,207 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.api.service.impl;

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.AlertGroupService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.AlertGroup;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.AlertGroupMapper;

import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;

/**
 * alert group service impl
 */
@Service
public class AlertGroupServiceImpl extends BaseServiceImpl implements AlertGroupService {

    @Autowired
    private AlertGroupMapper alertGroupMapper;

    /**
     * query alert group list
     *
     * @return alert group list
     */
    public Map<String, Object> queryAlertgroup() {

        HashMap<String, Object> result = new HashMap<>();
        List<AlertGroup> alertGroups = alertGroupMapper.queryAllGroupList();
        result.put(Constants.DATA_LIST, alertGroups);
        putMsg(result, Status.SUCCESS);

        return result;
    }

    /**
     * paging query alarm group list
     *
     * @param loginUser login user
     * @param searchVal search value
     * @param pageNo page number
     * @param pageSize page size
     * @return alert group list page
     */
    public Map<String, Object> listPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {

        Map<String, Object> result = new HashMap<>();
        if (isNotAdmin(loginUser, result)) {
            return result;
        }

        Page<AlertGroup> page = new Page<>(pageNo, pageSize);
        IPage<AlertGroup> alertGroupIPage = alertGroupMapper.queryAlertGroupPage(
                page, searchVal);
        PageInfo<AlertGroup> pageInfo = new PageInfo<>(pageNo, pageSize);
        pageInfo.setTotalCount((int) alertGroupIPage.getTotal());
        pageInfo.setLists(alertGroupIPage.getRecords());
        result.put(Constants.DATA_LIST, pageInfo);
        putMsg(result, Status.SUCCESS);

        return result;
    }

    /**
     * create alert group
     *
     * @param loginUser login user
     * @param groupName group name
     * @param desc description
     * @param alertInstanceIds alertInstanceIds
     * @return create result code
     */
    public Map<String, Object> createAlertgroup(User loginUser, String groupName, String desc, String alertInstanceIds) {
        Map<String, Object> result = new HashMap<>();
        // only admin can operate
        if (isNotAdmin(loginUser, result)) {
            return result;
        }

        AlertGroup alertGroup = new AlertGroup();
        Date now = new Date();

        alertGroup.setGroupName(groupName);
        alertGroup.setAlertInstanceIds(alertInstanceIds);
        alertGroup.setDescription(desc);
        alertGroup.setCreateTime(now);
        alertGroup.setUpdateTime(now);
        alertGroup.setCreateUserId(loginUser.getId());

        // insert
        int insert = alertGroupMapper.insert(alertGroup);

        if (insert > 0) {
            putMsg(result, Status.SUCCESS);
        } else {
            putMsg(result, Status.CREATE_ALERT_GROUP_ERROR);
        }
        return result;
    }

    /**
     * update alert group
     *
     * @param loginUser login user
     * @param id alert group id
     * @param groupName group name
     * @param desc description
     * @param alertInstanceIds alertInstanceIds
     * @return update result code
     */
    public Map<String, Object> updateAlertgroup(User loginUser, int id, String groupName, String desc, String alertInstanceIds) {
        Map<String, Object> result = new HashMap<>();

        if (isNotAdmin(loginUser, result)) {
            return result;
        }

        AlertGroup alertGroup = alertGroupMapper.selectById(id);

        if (alertGroup == null) {
            putMsg(result, Status.ALERT_GROUP_NOT_EXIST);
            return result;
        }

        Date now = new Date();

        if (StringUtils.isNotEmpty(groupName)) {
            alertGroup.setGroupName(groupName);
        }
        alertGroup.setDescription(desc);
        alertGroup.setUpdateTime(now);
        alertGroup.setCreateUserId(loginUser.getId());
        alertGroup.setAlertInstanceIds(alertInstanceIds);
        alertGroupMapper.updateById(alertGroup);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * delete alert group by id
     *
     * @param loginUser login user
     * @param id alert group id
     * @return delete result code
     */
    @Transactional(rollbackFor = RuntimeException.class)
    public Map<String, Object> delAlertgroupById(User loginUser, int id) {
        Map<String, Object> result = new HashMap<>();
        result.put(Constants.STATUS, false);

        // only admin can operate
        if (isNotAdmin(loginUser, result)) {
            return result;
        }
        // check exist
        AlertGroup alertGroup = alertGroupMapper.selectById(id);
        if (alertGroup == null) {
            putMsg(result, Status.ALERT_GROUP_NOT_EXIST);
            return result;
        }
        alertGroupMapper.deleteById(id);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * verify group name exists
     *
     * @param groupName group name
     * @return true if the group name exists, otherwise false
     */
    public boolean existGroupName(String groupName) {
        List<AlertGroup> alertGroup = alertGroupMapper.queryByGroupName(groupName);
        return CollectionUtils.isNotEmpty(alertGroup);
    }
}
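
(For context: a minimal sketch of how a caller might consume the Map-based result convention used by AlertGroupServiceImpl above. The Constants keys and the Status enum come from this commit; the class and method names below are hypothetical and are not part of the diff.)

// Hypothetical caller illustrating the result convention:
// putMsg(...) stores the status under Constants.STATUS / Constants.MSG,
// and the payload lives under Constants.DATA_LIST.
import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.common.Constants;

import java.util.List;
import java.util.Map;

public class AlertGroupResultExample {

    public static void printGroups(Map<String, Object> result) {
        if (result.get(Constants.STATUS) == Status.SUCCESS) {
            // on success, DATA_LIST holds the queried alert groups
            List<?> groups = (List<?>) result.get(Constants.DATA_LIST);
            groups.forEach(System.out::println);
        } else {
            // on failure, MSG holds the formatted status message
            System.err.println("query failed: " + result.get(Constants.MSG));
        }
    }
}
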
@@ -0,0 +1,136 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.api.service.impl;

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.BaseService;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UserType;
import org.apache.dolphinscheduler.common.utils.HadoopUtils;
import org.apache.dolphinscheduler.dao.entity.User;

import java.io.IOException;
import java.text.MessageFormat;
import java.util.Map;

/**
 * base service impl
 */
public class BaseServiceImpl implements BaseService {

    /**
     * check admin
     *
     * @param user input user
     * @return true if administrator, otherwise return false
     */
    public boolean isAdmin(User user) {
        return user.getUserType() == UserType.ADMIN_USER;
    }

    /**
     * check whether the login user is not an administrator
     *
     * @param loginUser login user
     * @param result result code
     * @return true if not administrator, otherwise false
     */
    public boolean isNotAdmin(User loginUser, Map<String, Object> result) {
        // only admin can operate
        if (!isAdmin(loginUser)) {
            putMsg(result, Status.USER_NO_OPERATION_PERM);
            return true;
        }
        return false;
    }

    /**
     * put message to map
     *
     * @param result result code
     * @param status status
     * @param statusParams status message parameters
     */
    public void putMsg(Map<String, Object> result, Status status, Object... statusParams) {
        result.put(Constants.STATUS, status);
        if (statusParams != null && statusParams.length > 0) {
            result.put(Constants.MSG, MessageFormat.format(status.getMsg(), statusParams));
        } else {
            result.put(Constants.MSG, status.getMsg());
        }
    }

    /**
     * put message to result object
     *
     * @param result result code
     * @param status status
     * @param statusParams status message parameters
     */
    public void putMsg(Result result, Status status, Object... statusParams) {
        result.setCode(status.getCode());
        if (statusParams != null && statusParams.length > 0) {
            result.setMsg(MessageFormat.format(status.getMsg(), statusParams));
        } else {
            result.setMsg(status.getMsg());
        }
    }

    /**
     * check whether the given condition holds and, if so, record the given status
     *
     * @param result result
     * @param bool condition to check
     * @param userNoOperationPerm status to record when the check fails
     * @return true if the check failed, otherwise false
     */
    public boolean check(Map<String, Object> result, boolean bool, Status userNoOperationPerm) {
        // only admin can operate
        if (bool) {
            result.put(Constants.STATUS, userNoOperationPerm);
            result.put(Constants.MSG, userNoOperationPerm.getMsg());
            return true;
        }
        return false;
    }

    /**
     * create tenant dir if not exists
     *
     * @param tenantCode tenant code
     * @throws IOException if hdfs operation exception
     */
    public void createTenantDirIfNotExists(String tenantCode) throws IOException {
        String resourcePath = HadoopUtils.getHdfsResDir(tenantCode);
        String udfsPath = HadoopUtils.getHdfsUdfDir(tenantCode);
        // init resource path and udf path
        HadoopUtils.getInstance().mkdir(resourcePath);
        HadoopUtils.getInstance().mkdir(udfsPath);
    }

    /**
     * check whether the operating user has permission on a resource
     *
     * @param operateUser operate user
     * @param createUserId create user id
     * @return true if the user created the resource or is an administrator
     */
    public boolean hasPerm(User operateUser, int createUserId) {
        return operateUser.getId() == createUserId || isAdmin(operateUser);
    }
}
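
(For context: putMsg above delegates to java.text.MessageFormat when status params are supplied. A standalone sketch of that formatting behavior; the template string is invented for illustration, not an actual Status message.)

// Standalone sketch of what putMsg(result, status, params) does with a
// parameterized status message ({0}-style MessageFormat placeholders).
import java.text.MessageFormat;

public class PutMsgFormatExample {
    public static void main(String[] args) {
        // hypothetical status message template
        String template = "process definition {0} is not online";
        // with status params, BaseServiceImpl fills the placeholders:
        System.out.println(MessageFormat.format(template, 42));
        // prints: process definition 42 is not online
    }
}
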
@@ -0,0 +1,659 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.api.service.impl;

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.DataSourceService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.DbConnectType;
import org.apache.dolphinscheduler.common.enums.DbType;
import org.apache.dolphinscheduler.common.utils.CommonUtils;
import org.apache.dolphinscheduler.common.utils.JSONUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.datasource.BaseDataSource;
import org.apache.dolphinscheduler.dao.datasource.DataSourceFactory;
import org.apache.dolphinscheduler.dao.datasource.OracleDataSource;
import org.apache.dolphinscheduler.dao.entity.DataSource;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.DataSourceMapper;
import org.apache.dolphinscheduler.dao.mapper.DataSourceUserMapper;

import java.sql.Connection;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.fasterxml.jackson.databind.node.ObjectNode;

/**
 * data source service impl
 */
@Service
public class DataSourceServiceImpl extends BaseServiceImpl implements DataSourceService {

    private static final Logger logger = LoggerFactory.getLogger(DataSourceServiceImpl.class);

    public static final String NAME = "name";
    public static final String NOTE = "note";
    public static final String TYPE = "type";
    public static final String HOST = "host";
    public static final String PORT = "port";
    public static final String PRINCIPAL = "principal";
    public static final String DATABASE = "database";
    public static final String USER_NAME = "userName";
    public static final String OTHER = "other";

    @Autowired
    private DataSourceMapper dataSourceMapper;

    @Autowired
    private DataSourceUserMapper datasourceUserMapper;

    /**
     * create data source
     *
     * @param loginUser login user
     * @param name data source name
     * @param desc data source description
     * @param type data source type
     * @param parameter datasource parameters
     * @return create result code
     */
    public Result<Object> createDataSource(User loginUser, String name, String desc, DbType type, String parameter) {

        Result<Object> result = new Result<>();
        // check whether the name is already in use
        if (checkName(name)) {
            putMsg(result, Status.DATASOURCE_EXIST);
            return result;
        }
        Result<Object> isConnection = checkConnection(type, parameter);
        if (Status.SUCCESS.getCode() != isConnection.getCode()) {
            // propagate the connection failure instead of an empty result
            return isConnection;
        }

        // build datasource
        DataSource dataSource = new DataSource();
        Date now = new Date();

        dataSource.setName(name.trim());
        dataSource.setNote(desc);
        dataSource.setUserId(loginUser.getId());
        dataSource.setUserName(loginUser.getUserName());
        dataSource.setType(type);
        dataSource.setConnectionParams(parameter);
        dataSource.setCreateTime(now);
        dataSource.setUpdateTime(now);
        dataSourceMapper.insert(dataSource);

        putMsg(result, Status.SUCCESS);

        return result;
    }

    /**
     * update datasource
     *
     * @param loginUser login user
     * @param name data source name
     * @param desc data source description
     * @param type data source type
     * @param parameter datasource parameters
     * @param id data source id
     * @return update result code
     */
    public Result<Object> updateDataSource(int id, User loginUser, String name, String desc, DbType type, String parameter) {

        Result<Object> result = new Result<>();
        // determine whether the data source exists
        DataSource dataSource = dataSourceMapper.selectById(id);
        if (dataSource == null) {
            putMsg(result, Status.RESOURCE_NOT_EXIST);
            return result;
        }

        if (!hasPerm(loginUser, dataSource.getUserId())) {
            putMsg(result, Status.USER_NO_OPERATION_PERM);
            return result;
        }

        // check whether the new name is already in use
        if (!name.trim().equals(dataSource.getName()) && checkName(name)) {
            putMsg(result, Status.DATASOURCE_EXIST);
            return result;
        }
        // check password: if the password is not updated, keep the old password
        ObjectNode paramObject = JSONUtils.parseObject(parameter);
        String password = paramObject.path(Constants.PASSWORD).asText();
        if (StringUtils.isBlank(password)) {
            String oldConnectionParams = dataSource.getConnectionParams();
            ObjectNode oldParams = JSONUtils.parseObject(oldConnectionParams);
            paramObject.put(Constants.PASSWORD, oldParams.path(Constants.PASSWORD).asText());
        }
        // connectionParams json
        String connectionParams = paramObject.toString();

        // test with the merged params so a retained password is included
        Result<Object> isConnection = checkConnection(type, connectionParams);
        if (Status.SUCCESS.getCode() != isConnection.getCode()) {
            return isConnection;
        }

        Date now = new Date();

        dataSource.setName(name.trim());
        dataSource.setNote(desc);
        dataSource.setUserName(loginUser.getUserName());
        dataSource.setType(type);
        dataSource.setConnectionParams(connectionParams);
        dataSource.setUpdateTime(now);
        dataSourceMapper.updateById(dataSource);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    private boolean checkName(String name) {
        List<DataSource> queryDataSource = dataSourceMapper.queryDataSourceByName(name.trim());
        return queryDataSource != null && !queryDataSource.isEmpty();
    }

    /**
     * query datasource detail by id
     *
     * @param id datasource id
     * @return data source detail
     */
    public Map<String, Object> queryDataSource(int id) {

        Map<String, Object> result = new HashMap<>();
        DataSource dataSource = dataSourceMapper.selectById(id);
        if (dataSource == null) {
            putMsg(result, Status.RESOURCE_NOT_EXIST);
            return result;
        }
        // type
        String dataSourceType = dataSource.getType().toString();
        // name
        String dataSourceName = dataSource.getName();
        // desc
        String desc = dataSource.getNote();
        // parameter
        String parameter = dataSource.getConnectionParams();

        BaseDataSource datasourceForm = DataSourceFactory.getDatasource(dataSource.getType(), parameter);
        DbConnectType connectType = null;
        String hostSeparator = Constants.DOUBLE_SLASH;
        if (DbType.ORACLE.equals(dataSource.getType())) {
            connectType = ((OracleDataSource) datasourceForm).getConnectType();
            if (DbConnectType.ORACLE_SID.equals(connectType)) {
                hostSeparator = Constants.AT_SIGN;
            }
        }
        String database = datasourceForm.getDatabase();
        // jdbc connection params
        String other = datasourceForm.getOther();
        String address = datasourceForm.getAddress();

        String[] hostsPorts = getHostsAndPort(address, hostSeparator);
        // host
        String host = hostsPorts[0];
        // port
        String port = hostsPorts[1];
        String separator = "";

        switch (dataSource.getType()) {
            case HIVE:
            case SQLSERVER:
                separator = ";";
                break;
            case MYSQL:
            case POSTGRESQL:
            case CLICKHOUSE:
            case ORACLE:
            case PRESTO:
            default:
                separator = "&";
                break;
        }

        Map<String, String> otherMap = new LinkedHashMap<>();
        if (other != null) {
            String[] configs = other.split(separator);
            for (String config : configs) {
                otherMap.put(config.split("=")[0], config.split("=")[1]);
            }
        }

        Map<String, Object> map = new HashMap<>();
        map.put(NAME, dataSourceName);
        map.put(NOTE, desc);
        map.put(TYPE, dataSourceType);
        if (connectType != null) {
            map.put(Constants.ORACLE_DB_CONNECT_TYPE, connectType);
        }

        map.put(HOST, host);
        map.put(PORT, port);
        map.put(PRINCIPAL, datasourceForm.getPrincipal());
        map.put(Constants.KERBEROS_KRB5_CONF_PATH, datasourceForm.getJavaSecurityKrb5Conf());
        map.put(Constants.KERBEROS_KEY_TAB_USERNAME, datasourceForm.getLoginUserKeytabUsername());
        map.put(Constants.KERBEROS_KEY_TAB_PATH, datasourceForm.getLoginUserKeytabPath());
        map.put(DATABASE, database);
        map.put(USER_NAME, datasourceForm.getUser());
        map.put(OTHER, otherMap);
        result.put(Constants.DATA_LIST, map);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * query datasource list by keyword
     *
     * @param loginUser login user
     * @param searchVal search value
     * @param pageNo page number
     * @param pageSize page size
     * @return data source list page
     */
    public Map<String, Object> queryDataSourceListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
        Map<String, Object> result = new HashMap<>();
        IPage<DataSource> dataSourceList;
        Page<DataSource> dataSourcePage = new Page<>(pageNo, pageSize);

        if (isAdmin(loginUser)) {
            dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, 0, searchVal);
        } else {
            dataSourceList = dataSourceMapper.selectPaging(dataSourcePage, loginUser.getId(), searchVal);
        }

        List<DataSource> dataSources = dataSourceList != null ? dataSourceList.getRecords() : new ArrayList<>();
        handlePasswd(dataSources);
        PageInfo<DataSource> pageInfo = new PageInfo<>(pageNo, pageSize);
        pageInfo.setTotalCount((int) (dataSourceList != null ? dataSourceList.getTotal() : 0L));
        pageInfo.setLists(dataSources);
        result.put(Constants.DATA_LIST, pageInfo);
        putMsg(result, Status.SUCCESS);

        return result;
    }

    /**
     * handle datasource connection password for safety
     *
     * @param dataSourceList data source list
     */
    private void handlePasswd(List<DataSource> dataSourceList) {
        for (DataSource dataSource : dataSourceList) {
            String connectionParams = dataSource.getConnectionParams();
            ObjectNode object = JSONUtils.parseObject(connectionParams);
            object.put(Constants.PASSWORD, getHiddenPassword());
            dataSource.setConnectionParams(object.toString());
        }
    }

    /**
     * get hidden password (resolve the security hotspot)
     *
     * @return hidden password
     */
    private String getHiddenPassword() {
        return Constants.XXXXXX;
    }

    /**
     * query data source list
     *
     * @param loginUser login user
     * @param type data source type
     * @return data source list page
     */
    public Map<String, Object> queryDataSourceList(User loginUser, Integer type) {
        Map<String, Object> result = new HashMap<>();

        List<DataSource> datasourceList;

        if (isAdmin(loginUser)) {
            datasourceList = dataSourceMapper.listAllDataSourceByType(type);
        } else {
            datasourceList = dataSourceMapper.queryDataSourceByType(loginUser.getId(), type);
        }

        result.put(Constants.DATA_LIST, datasourceList);
        putMsg(result, Status.SUCCESS);

        return result;
    }

    /**
     * verify datasource name exists
     *
     * @param name datasource name
     * @return check result code: SUCCESS if the name is free, DATASOURCE_EXIST otherwise
     */
    public Result<Object> verifyDataSourceName(String name) {
        Result<Object> result = new Result<>();
        List<DataSource> dataSourceList = dataSourceMapper.queryDataSourceByName(name);
        if (dataSourceList != null && !dataSourceList.isEmpty()) {
            putMsg(result, Status.DATASOURCE_EXIST);
        } else {
            putMsg(result, Status.SUCCESS);
        }

        return result;
    }

    /**
     * check connection
     *
     * @param type data source type
     * @param parameter data source parameters
     * @return connection test result code
     */
    public Result<Object> checkConnection(DbType type, String parameter) {
        Result<Object> result = new Result<>();
        BaseDataSource datasource = DataSourceFactory.getDatasource(type, parameter);
        if (datasource == null) {
            putMsg(result, Status.DATASOURCE_TYPE_NOT_EXIST, type);
            return result;
        }
        try (Connection connection = datasource.getConnection()) {
            if (connection == null) {
                putMsg(result, Status.CONNECTION_TEST_FAILURE);
                return result;
            }
            putMsg(result, Status.SUCCESS);
            return result;
        } catch (Exception e) {
            logger.error("datasource test connection error, dbType:{}, jdbcUrl:{}, message:{}.", type, datasource.getJdbcUrl(), e.getMessage());
            return new Result<>(Status.CONNECTION_TEST_FAILURE.getCode(), e.getMessage());
        }
    }

    /**
     * test connection
     *
     * @param id datasource id
     * @return connect result code
     */
    public Result<Object> connectionTest(int id) {
        DataSource dataSource = dataSourceMapper.selectById(id);
        if (dataSource == null) {
            Result<Object> result = new Result<>();
            putMsg(result, Status.RESOURCE_NOT_EXIST);
            return result;
        }
        return checkConnection(dataSource.getType(), dataSource.getConnectionParams());
    }

    /**
     * build parameters
     *
     * @param type data source type
     * @param host data source host
     * @param port data source port
     * @param database data source database name
     * @param principal principal
     * @param userName user name
     * @param password password
     * @param connectType oracle connect type
     * @param other other parameters
     * @param javaSecurityKrb5Conf kerberos krb5 conf path
     * @param loginUserKeytabUsername kerberos keytab username
     * @param loginUserKeytabPath kerberos keytab path
     * @return datasource parameter
     */
    public String buildParameter(DbType type, String host,
                                 String port, String database, String principal, String userName,
                                 String password, DbConnectType connectType, String other,
                                 String javaSecurityKrb5Conf, String loginUserKeytabUsername, String loginUserKeytabPath) {

        String address = buildAddress(type, host, port, connectType);
        Map<String, Object> parameterMap = new LinkedHashMap<>();
        String jdbcUrl;
        if (DbType.SQLSERVER == type) {
            jdbcUrl = address + ";databaseName=" + database;
        } else {
            jdbcUrl = address + "/" + database;
        }

        if (Constants.ORACLE.equals(type.name())) {
            parameterMap.put(Constants.ORACLE_DB_CONNECT_TYPE, connectType);
        }

        if (CommonUtils.getKerberosStartupState()
                && (type == DbType.HIVE || type == DbType.SPARK)) {
            jdbcUrl += ";principal=" + principal;
        }

        String separator = "";
        if (Constants.MYSQL.equals(type.name())
                || Constants.POSTGRESQL.equals(type.name())
                || Constants.CLICKHOUSE.equals(type.name())
                || Constants.ORACLE.equals(type.name())
                || Constants.PRESTO.equals(type.name())) {
            separator = "&";
        } else if (Constants.HIVE.equals(type.name())
                || Constants.SPARK.equals(type.name())
                || Constants.DB2.equals(type.name())
                || Constants.SQLSERVER.equals(type.name())) {
            separator = ";";
        }

        parameterMap.put(TYPE, connectType);
        parameterMap.put(Constants.ADDRESS, address);
        parameterMap.put(Constants.DATABASE, database);
        parameterMap.put(Constants.JDBC_URL, jdbcUrl);
        parameterMap.put(Constants.USER, userName);
        parameterMap.put(Constants.PASSWORD, CommonUtils.encodePassword(password));
        if (CommonUtils.getKerberosStartupState()
                && (type == DbType.HIVE || type == DbType.SPARK)) {
            parameterMap.put(Constants.PRINCIPAL, principal);
            parameterMap.put(Constants.KERBEROS_KRB5_CONF_PATH, javaSecurityKrb5Conf);
            parameterMap.put(Constants.KERBEROS_KEY_TAB_USERNAME, loginUserKeytabUsername);
            parameterMap.put(Constants.KERBEROS_KEY_TAB_PATH, loginUserKeytabPath);
        }

        Map<String, String> map = JSONUtils.toMap(other);
        if (map != null) {
            StringBuilder otherSb = new StringBuilder();
            for (Map.Entry<String, String> entry : map.entrySet()) {
                otherSb.append(String.format("%s=%s%s", entry.getKey(), entry.getValue(), separator));
            }
            if (!Constants.DB2.equals(type.name())) {
                otherSb.deleteCharAt(otherSb.length() - 1);
            }
            parameterMap.put(Constants.OTHER, otherSb);
        }

        if (logger.isDebugEnabled()) {
            logger.debug("parameters map:{}", JSONUtils.toJsonString(parameterMap));
        }
        return JSONUtils.toJsonString(parameterMap);
    }

    private String buildAddress(DbType type, String host, String port, DbConnectType connectType) {
        StringBuilder sb = new StringBuilder();
        if (Constants.MYSQL.equals(type.name())) {
            sb.append(Constants.JDBC_MYSQL);
            sb.append(host).append(":").append(port);
        } else if (Constants.POSTGRESQL.equals(type.name())) {
            sb.append(Constants.JDBC_POSTGRESQL);
            sb.append(host).append(":").append(port);
        } else if (Constants.HIVE.equals(type.name()) || Constants.SPARK.equals(type.name())) {
            sb.append(Constants.JDBC_HIVE_2);
            String[] hostArray = host.split(",");
            if (hostArray.length > 0) {
                for (String zkHost : hostArray) {
                    sb.append(String.format("%s:%s,", zkHost, port));
                }
                sb.deleteCharAt(sb.length() - 1);
            }
        } else if (Constants.CLICKHOUSE.equals(type.name())) {
            sb.append(Constants.JDBC_CLICKHOUSE);
            sb.append(host).append(":").append(port);
        } else if (Constants.ORACLE.equals(type.name())) {
            if (connectType == DbConnectType.ORACLE_SID) {
                sb.append(Constants.JDBC_ORACLE_SID);
            } else {
                sb.append(Constants.JDBC_ORACLE_SERVICE_NAME);
            }
            sb.append(host).append(":").append(port);
        } else if (Constants.SQLSERVER.equals(type.name())) {
            sb.append(Constants.JDBC_SQLSERVER);
            sb.append(host).append(":").append(port);
        } else if (Constants.DB2.equals(type.name())) {
            sb.append(Constants.JDBC_DB2);
            sb.append(host).append(":").append(port);
        } else if (Constants.PRESTO.equals(type.name())) {
            sb.append(Constants.JDBC_PRESTO);
            sb.append(host).append(":").append(port);
        }

        return sb.toString();
    }

    /**
     * delete datasource
     *
     * @param loginUser login user
     * @param datasourceId data source id
     * @return delete result code
     */
    @Transactional(rollbackFor = RuntimeException.class)
    public Result<Object> delete(User loginUser, int datasourceId) {
        Result<Object> result = new Result<>();
        try {
            // query datasource by id
            DataSource dataSource = dataSourceMapper.selectById(datasourceId);
            if (dataSource == null) {
                logger.error("resource id {} not exist", datasourceId);
                putMsg(result, Status.RESOURCE_NOT_EXIST);
                return result;
            }
            if (!hasPerm(loginUser, dataSource.getUserId())) {
                putMsg(result, Status.USER_NO_OPERATION_PERM);
                return result;
            }
            dataSourceMapper.deleteById(datasourceId);
            datasourceUserMapper.deleteByDatasourceId(datasourceId);
            putMsg(result, Status.SUCCESS);
        } catch (Exception e) {
            logger.error("delete datasource error", e);
            throw new RuntimeException("delete datasource error");
        }
        return result;
    }

    /**
     * unauthorized datasource
     *
     * @param loginUser login user
     * @param userId user id
     * @return unauthed data source result code
     */
    public Map<String, Object> unauthDatasource(User loginUser, Integer userId) {

        Map<String, Object> result = new HashMap<>();
        // only admin can operate
        if (!isAdmin(loginUser)) {
            putMsg(result, Status.USER_NO_OPERATION_PERM);
            return result;
        }

        // query all data sources except those belonging to userId
        List<DataSource> resultList = new ArrayList<>();
        List<DataSource> datasourceList = dataSourceMapper.queryDatasourceExceptUserId(userId);
        if (datasourceList != null && !datasourceList.isEmpty()) {
            Set<DataSource> datasourceSet = new HashSet<>(datasourceList);

            List<DataSource> authedDataSourceList = dataSourceMapper.queryAuthedDatasource(userId);

            if (authedDataSourceList != null && !authedDataSourceList.isEmpty()) {
                Set<DataSource> authedDataSourceSet = new HashSet<>(authedDataSourceList);
                datasourceSet.removeAll(authedDataSourceSet);
            }
            resultList = new ArrayList<>(datasourceSet);
        }
        result.put(Constants.DATA_LIST, resultList);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * authorized datasource
     *
     * @param loginUser login user
     * @param userId user id
     * @return authorized result code
     */
    public Map<String, Object> authedDatasource(User loginUser, Integer userId) {
        Map<String, Object> result = new HashMap<>();

        if (!isAdmin(loginUser)) {
            putMsg(result, Status.USER_NO_OPERATION_PERM);
            return result;
        }

        List<DataSource> authedDatasourceList = dataSourceMapper.queryAuthedDatasource(userId);
        result.put(Constants.DATA_LIST, authedDatasourceList);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * get host and port by address
     *
     * @param address address
     * @param separator separator
     * @return string array: [host,port]
     */
    private String[] getHostsAndPort(String address, String separator) {
        String[] result = new String[2];
        String[] tmpArray = address.split(separator);
        String hostsAndPorts = tmpArray[tmpArray.length - 1];
        StringBuilder hosts = new StringBuilder();
        String[] hostPortArray = hostsAndPorts.split(Constants.COMMA);
        String port = hostPortArray[0].split(Constants.COLON)[1];
        for (String hostPort : hostPortArray) {
            hosts.append(hostPort.split(Constants.COLON)[0]).append(Constants.COMMA);
        }
        hosts.deleteCharAt(hosts.length() - 1);
        result[0] = hosts.toString();
        result[1] = port;
        return result;
    }
}
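
(For context: a standalone sketch of the host/port parsing that getHostsAndPort above performs on a multi-host HIVE-style address. The literal "//", ",", and ":" separators are assumed to match Constants.DOUBLE_SLASH, Constants.COMMA, and Constants.COLON; the address value is invented for illustration.)

// Standalone sketch: split a JDBC address into a comma-joined host list
// and a single port, mirroring getHostsAndPort(address, separator).
public class HostsAndPortExample {
    public static void main(String[] args) {
        String address = "jdbc:hive2://host1:10000,host2:10000";  // example address, not from the diff
        String[] tmp = address.split("//");               // assumed Constants.DOUBLE_SLASH
        String hostsAndPorts = tmp[tmp.length - 1];       // "host1:10000,host2:10000"
        String port = hostsAndPorts.split(",")[0].split(":")[1];  // port taken from the first pair
        StringBuilder hosts = new StringBuilder();
        for (String hostPort : hostsAndPorts.split(",")) {
            hosts.append(hostPort.split(":")[0]).append(',');
        }
        hosts.deleteCharAt(hosts.length() - 1);
        System.out.println(hosts + " -> " + port);        // prints: host1,host2 -> 10000
    }
}
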
@ -0,0 +1,578 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
|
||||
package org.apache.dolphinscheduler.api.service.impl; |
||||
|
||||
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE; |
||||
import static org.apache.dolphinscheduler.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE; |
||||
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_RECOVER_PROCESS_ID_STRING; |
||||
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_NODE_NAMES; |
||||
import static org.apache.dolphinscheduler.common.Constants.CMD_PARAM_START_PARAMS; |
||||
import static org.apache.dolphinscheduler.common.Constants.MAX_TASK_TIMEOUT; |
||||
|
||||
import org.apache.dolphinscheduler.api.enums.ExecuteType; |
||||
import org.apache.dolphinscheduler.api.enums.Status; |
||||
import org.apache.dolphinscheduler.api.service.ExecutorService; |
||||
import org.apache.dolphinscheduler.api.service.MonitorService; |
||||
import org.apache.dolphinscheduler.api.service.ProjectService; |
||||
import org.apache.dolphinscheduler.common.Constants; |
||||
import org.apache.dolphinscheduler.common.enums.CommandType; |
||||
import org.apache.dolphinscheduler.common.enums.ExecutionStatus; |
||||
import org.apache.dolphinscheduler.common.enums.FailureStrategy; |
||||
import org.apache.dolphinscheduler.common.enums.Priority; |
||||
import org.apache.dolphinscheduler.common.enums.ReleaseState; |
||||
import org.apache.dolphinscheduler.common.enums.RunMode; |
||||
import org.apache.dolphinscheduler.common.enums.TaskDependType; |
||||
import org.apache.dolphinscheduler.common.enums.WarningType; |
||||
import org.apache.dolphinscheduler.common.model.Server; |
||||
import org.apache.dolphinscheduler.common.utils.CollectionUtils; |
||||
import org.apache.dolphinscheduler.common.utils.DateUtils; |
||||
import org.apache.dolphinscheduler.common.utils.JSONUtils; |
||||
import org.apache.dolphinscheduler.common.utils.StringUtils; |
||||
import org.apache.dolphinscheduler.dao.entity.Command; |
||||
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; |
||||
import org.apache.dolphinscheduler.dao.entity.ProcessInstance; |
||||
import org.apache.dolphinscheduler.dao.entity.Project; |
||||
import org.apache.dolphinscheduler.dao.entity.Schedule; |
||||
import org.apache.dolphinscheduler.dao.entity.Tenant; |
||||
import org.apache.dolphinscheduler.dao.entity.User; |
||||
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; |
||||
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; |
||||
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; |
||||
import org.apache.dolphinscheduler.service.process.ProcessService; |
||||
import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; |
||||
|
||||
import java.util.ArrayList; |
||||
import java.util.Date; |
||||
import java.util.HashMap; |
||||
import java.util.LinkedList; |
||||
import java.util.List; |
||||
import java.util.Map; |
||||
|
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.stereotype.Service; |
||||
|
||||
/** |
||||
* executor service impl |
||||
*/ |
||||
@Service |
||||
public class ExecutorServiceImpl extends BaseServiceImpl implements ExecutorService { |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(ExecutorServiceImpl.class); |
||||
|
||||
@Autowired |
||||
private ProjectMapper projectMapper; |
||||
|
||||
@Autowired |
||||
private ProjectService projectService; |
||||
|
||||
@Autowired |
||||
private ProcessDefinitionMapper processDefinitionMapper; |
||||
|
||||
@Autowired |
||||
private MonitorService monitorService; |
||||
|
||||
|
||||
@Autowired |
||||
private ProcessInstanceMapper processInstanceMapper; |
||||
|
||||
|
||||
@Autowired |
||||
private ProcessService processService; |
||||
|
||||
/** |
||||
* execute process instance |
||||
* |
||||
* @param loginUser login user |
||||
* @param projectName project name |
||||
* @param processDefinitionId process Definition Id |
||||
* @param cronTime cron time |
||||
* @param commandType command type |
||||
* @param failureStrategy failuer strategy |
||||
* @param startNodeList start nodelist |
||||
* @param taskDependType node dependency type |
||||
* @param warningType warning type |
||||
* @param warningGroupId notify group id |
||||
* @param processInstancePriority process instance priority |
||||
* @param workerGroup worker group name |
||||
* @param runMode run mode |
||||
* @param timeout timeout |
||||
* @param startParams the global param values which pass to new process instance |
||||
* @return execute process instance code |
||||
*/ |
||||
public Map<String, Object> execProcessInstance(User loginUser, String projectName, |
||||
int processDefinitionId, String cronTime, CommandType commandType, |
||||
FailureStrategy failureStrategy, String startNodeList, |
||||
TaskDependType taskDependType, WarningType warningType, int warningGroupId, |
||||
RunMode runMode, |
||||
Priority processInstancePriority, String workerGroup, Integer timeout, |
||||
Map<String, String> startParams) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
// timeout is invalid
|
||||
if (timeout <= 0 || timeout > MAX_TASK_TIMEOUT) { |
||||
putMsg(result, Status.TASK_TIMEOUT_PARAMS_ERROR); |
||||
return result; |
||||
} |
||||
Project project = projectMapper.queryByName(projectName); |
||||
Map<String, Object> checkResultAndAuth = checkResultAndAuth(loginUser, projectName, project); |
||||
if (checkResultAndAuth != null) { |
||||
return checkResultAndAuth; |
||||
} |
||||
|
||||
// check process define release state
|
||||
ProcessDefinition processDefinition = processDefinitionMapper.selectById(processDefinitionId); |
||||
result = checkProcessDefinitionValid(processDefinition, processDefinitionId); |
||||
if (result.get(Constants.STATUS) != Status.SUCCESS) { |
||||
return result; |
||||
} |
||||
|
||||
if (!checkTenantSuitable(processDefinition)) { |
||||
logger.error("there is not any valid tenant for the process definition: id:{},name:{}, ", |
||||
processDefinition.getId(), processDefinition.getName()); |
||||
putMsg(result, Status.TENANT_NOT_SUITABLE); |
||||
return result; |
||||
} |
||||
|
||||
// check master exists
|
||||
if (!checkMasterExists(result)) { |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* create command |
||||
*/ |
||||
int create = this.createCommand(commandType, processDefinitionId, |
||||
taskDependType, failureStrategy, startNodeList, cronTime, warningType, loginUser.getId(), |
||||
warningGroupId, runMode, processInstancePriority, workerGroup, startParams); |
||||
|
||||
if (create > 0) { |
||||
processDefinition.setWarningGroupId(warningGroupId); |
||||
processDefinitionMapper.updateById(processDefinition); |
||||
putMsg(result, Status.SUCCESS); |
||||
} else { |
||||
putMsg(result, Status.START_PROCESS_INSTANCE_ERROR); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* check whether master exists |
||||
* |
||||
* @param result result |
||||
* @return master exists return true , otherwise return false |
||||
*/ |
||||
private boolean checkMasterExists(Map<String, Object> result) { |
||||
// check master server exists
|
||||
List<Server> masterServers = monitorService.getServerListFromZK(true); |
||||
|
||||
// no master
|
||||
if (masterServers.isEmpty()) { |
||||
putMsg(result, Status.MASTER_NOT_EXISTS); |
||||
return false; |
||||
} |
||||
return true; |
||||
} |
||||
|
||||
/** |
||||
* check whether the process definition can be executed |
||||
* |
||||
* @param processDefinition process definition |
||||
* @param processDefineId process definition id |
||||
* @return check result code |
||||
*/ |
||||
public Map<String, Object> checkProcessDefinitionValid(ProcessDefinition processDefinition, int processDefineId) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
if (processDefinition == null) { |
||||
// check process definition exists
|
||||
putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineId); |
||||
} else if (processDefinition.getReleaseState() != ReleaseState.ONLINE) { |
||||
// check process definition online
|
||||
putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefineId); |
||||
} else { |
||||
result.put(Constants.STATUS, Status.SUCCESS); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* do action to process instance:pause, stop, repeat, recover from pause, recover from stop |
||||
* |
||||
* @param loginUser login user |
||||
* @param projectName project name |
||||
* @param processInstanceId process instance id |
||||
* @param executeType execute type |
||||
* @return execute result code |
||||
*/ |
||||
public Map<String, Object> execute(User loginUser, String projectName, Integer processInstanceId, ExecuteType executeType) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
Map<String, Object> checkResult = checkResultAndAuth(loginUser, projectName, project); |
||||
if (checkResult != null) { |
||||
return checkResult; |
||||
} |
||||
|
||||
// check master exists
|
||||
if (!checkMasterExists(result)) { |
||||
return result; |
||||
} |
||||
|
||||
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId); |
||||
if (processInstance == null) { |
||||
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); |
||||
return result; |
||||
} |
||||
|
||||
ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId()); |
||||
if (executeType != ExecuteType.STOP && executeType != ExecuteType.PAUSE) { |
||||
result = checkProcessDefinitionValid(processDefinition, processInstance.getProcessDefinitionId()); |
||||
if (result.get(Constants.STATUS) != Status.SUCCESS) { |
||||
return result; |
||||
} |
||||
} |
||||
|
||||
checkResult = checkExecuteType(processInstance, executeType); |
||||
Status status = (Status) checkResult.get(Constants.STATUS); |
||||
if (status != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
if (!checkTenantSuitable(processDefinition)) { |
||||
logger.error("there is not any valid tenant for the process definition: id:{},name:{}, ", |
||||
processDefinition.getId(), processDefinition.getName()); |
||||
putMsg(result, Status.TENANT_NOT_SUITABLE); |
||||
} |
||||
|
||||
switch (executeType) { |
||||
case REPEAT_RUNNING: |
||||
result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.REPEAT_RUNNING); |
||||
break; |
||||
case RECOVER_SUSPENDED_PROCESS: |
||||
result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.RECOVER_SUSPENDED_PROCESS); |
||||
break; |
||||
case START_FAILURE_TASK_PROCESS: |
||||
result = insertCommand(loginUser, processInstanceId, processDefinition.getId(), CommandType.START_FAILURE_TASK_PROCESS); |
||||
break; |
||||
case STOP: |
||||
if (processInstance.getState() == ExecutionStatus.READY_STOP) { |
||||
putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState()); |
||||
} else { |
||||
result = updateProcessInstancePrepare(processInstance, CommandType.STOP, ExecutionStatus.READY_STOP); |
||||
} |
||||
break; |
||||
case PAUSE: |
||||
if (processInstance.getState() == ExecutionStatus.READY_PAUSE) { |
||||
putMsg(result, Status.PROCESS_INSTANCE_ALREADY_CHANGED, processInstance.getName(), processInstance.getState()); |
||||
} else { |
||||
result = updateProcessInstancePrepare(processInstance, CommandType.PAUSE, ExecutionStatus.READY_PAUSE); |
||||
} |
||||
break; |
||||
default: |
||||
logger.error("unknown execute type : {}", executeType); |
||||
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "unknown execute type"); |
||||
|
||||
break; |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* check tenant suitable |
||||
* |
||||
* @param processDefinition process definition |
||||
* @return true if tenant suitable, otherwise return false |
||||
*/ |
||||
private boolean checkTenantSuitable(ProcessDefinition processDefinition) { |
||||
Tenant tenant = processService.getTenantForProcess(processDefinition.getTenantId(), |
||||
processDefinition.getUserId()); |
||||
return tenant != null; |
||||
} |
||||
|
||||
/** |
||||
* Check the state of process instance and the type of operation match |
||||
* |
||||
* @param processInstance process instance |
||||
* @param executeType execute type |
||||
* @return check result code |
||||
*/ |
||||
private Map<String, Object> checkExecuteType(ProcessInstance processInstance, ExecuteType executeType) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(); |
||||
ExecutionStatus executionStatus = processInstance.getState(); |
||||
boolean checkResult = false; |
||||
switch (executeType) { |
||||
case PAUSE: |
||||
case STOP: |
||||
if (executionStatus.typeIsRunning()) { |
||||
checkResult = true; |
||||
} |
||||
break; |
||||
case REPEAT_RUNNING: |
||||
if (executionStatus.typeIsFinished()) { |
||||
checkResult = true; |
||||
} |
||||
break; |
||||
case START_FAILURE_TASK_PROCESS: |
||||
if (executionStatus.typeIsFailure()) { |
||||
checkResult = true; |
||||
} |
||||
break; |
||||
case RECOVER_SUSPENDED_PROCESS: |
||||
if (executionStatus.typeIsPause() || executionStatus.typeIsCancel()) { |
||||
checkResult = true; |
||||
} |
||||
break; |
||||
default: |
||||
break; |
||||
} |
||||
if (!checkResult) { |
||||
putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR, processInstance.getName(), executionStatus.toString(), executeType.toString()); |
||||
} else { |
||||
putMsg(result, Status.SUCCESS); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* prepare to update process instance command type and status |
||||
* |
||||
* @param processInstance process instance |
||||
* @param commandType command type |
||||
* @param executionStatus execute status |
||||
* @return update result |
||||
*/ |
||||
private Map<String, Object> updateProcessInstancePrepare(ProcessInstance processInstance, CommandType commandType, ExecutionStatus executionStatus) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
|
||||
processInstance.setCommandType(commandType); |
||||
processInstance.addHistoryCmd(commandType); |
||||
processInstance.setState(executionStatus); |
||||
int update = processService.updateProcessInstance(processInstance); |
||||
|
||||
// determine whether the process is normal
|
||||
if (update > 0) { |
||||
putMsg(result, Status.SUCCESS); |
||||
} else { |
||||
putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* insert command, used in the implementation of the page, re run, recovery (pause / failure) execution |
||||
* |
||||
* @param loginUser login user |
||||
* @param instanceId instance id |
||||
* @param processDefinitionId process definition id |
||||
* @param commandType command type |
||||
* @return insert result code |
||||
*/ |
||||
private Map<String, Object> insertCommand(User loginUser, Integer instanceId, Integer processDefinitionId, CommandType commandType) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
Command command = new Command(); |
||||
command.setCommandType(commandType); |
||||
command.setProcessDefinitionId(processDefinitionId); |
||||
command.setCommandParam(String.format("{\"%s\":%d}", |
||||
CMD_PARAM_RECOVER_PROCESS_ID_STRING, instanceId)); |
||||
command.setExecutorId(loginUser.getId()); |
||||
|
||||
if (!processService.verifyIsNeedCreateCommand(command)) { |
||||
putMsg(result, Status.PROCESS_INSTANCE_EXECUTING_COMMAND, processDefinitionId); |
||||
return result; |
||||
} |
||||
|
||||
int create = processService.createCommand(command); |
||||
|
||||
if (create > 0) { |
||||
putMsg(result, Status.SUCCESS); |
||||
} else { |
||||
putMsg(result, Status.EXECUTE_PROCESS_INSTANCE_ERROR); |
||||
} |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* check if sub processes are offline before starting process definition |
||||
* |
||||
* @param processDefineId process definition id |
||||
* @return check result code |
||||
*/ |
||||
public Map<String, Object> startCheckByProcessDefinedId(int processDefineId) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
|
||||
if (processDefineId == 0) { |
||||
logger.error("process definition id is null"); |
||||
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, "process definition id"); |
||||
} |
||||
List<Integer> ids = new ArrayList<>(); |
||||
processService.recurseFindSubProcessId(processDefineId, ids); |
||||
Integer[] idArray = ids.toArray(new Integer[ids.size()]); |
||||
if (!ids.isEmpty()) { |
||||
List<ProcessDefinition> processDefinitionList = processDefinitionMapper.queryDefinitionListByIdList(idArray); |
||||
if (processDefinitionList != null) { |
||||
for (ProcessDefinition processDefinition : processDefinitionList) { |
||||
/** |
||||
* if there is no online process, exit directly |
||||
*/ |
||||
if (processDefinition.getReleaseState() != ReleaseState.ONLINE) { |
||||
putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefinition.getName()); |
||||
logger.info("not release process definition id: {} , name : {}", |
||||
processDefinition.getId(), processDefinition.getName()); |
||||
return result; |
||||
} |
||||
} |
||||
} |
||||
} |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* create command |
||||
* |
||||
* @param commandType commandType |
||||
* @param processDefineId processDefineId |
||||
* @param nodeDep nodeDep |
||||
* @param failureStrategy failureStrategy |
||||
* @param startNodeList startNodeList |
||||
* @param schedule schedule |
||||
* @param warningType warningType |
||||
* @param executorId executorId |
||||
* @param warningGroupId warningGroupId |
||||
* @param runMode runMode |
||||
* @param processInstancePriority processInstancePriority |
||||
* @param workerGroup workerGroup |
||||
* @return command id |
||||
*/ |
    private int createCommand(CommandType commandType, int processDefineId,
                              TaskDependType nodeDep, FailureStrategy failureStrategy,
                              String startNodeList, String schedule, WarningType warningType,
                              int executorId, int warningGroupId,
                              RunMode runMode, Priority processInstancePriority, String workerGroup,
                              Map<String, String> startParams) {

        // instantiate the command and its parameter map
        Command command = new Command();
        Map<String, String> cmdParam = new HashMap<>();
        if (commandType == null) {
            command.setCommandType(CommandType.START_PROCESS);
        } else {
            command.setCommandType(commandType);
        }
        command.setProcessDefinitionId(processDefineId);
        if (nodeDep != null) {
            command.setTaskDependType(nodeDep);
        }
        if (failureStrategy != null) {
            command.setFailureStrategy(failureStrategy);
        }

        if (StringUtils.isNotEmpty(startNodeList)) {
            cmdParam.put(CMD_PARAM_START_NODE_NAMES, startNodeList);
        }
        if (warningType != null) {
            command.setWarningType(warningType);
        }
        if (startParams != null && startParams.size() > 0) {
            cmdParam.put(CMD_PARAM_START_PARAMS, JSONUtils.toJsonString(startParams));
        }
        command.setCommandParam(JSONUtils.toJsonString(cmdParam));
        command.setExecutorId(executorId);
        command.setWarningGroupId(warningGroupId);
        command.setProcessInstancePriority(processInstancePriority);
        command.setWorkerGroup(workerGroup);

        Date start = null;
        Date end = null;
        if (StringUtils.isNotEmpty(schedule)) {
            String[] interval = schedule.split(",");
            if (interval.length == 2) {
                start = DateUtils.getScheduleDate(interval[0]);
                end = DateUtils.getScheduleDate(interval[1]);
            }
        }

        // determine whether this is a complement (backfill) run
        if (commandType == CommandType.COMPLEMENT_DATA) {
            runMode = (runMode == null) ? RunMode.RUN_MODE_SERIAL : runMode;
            if (null != start && null != end && !start.after(end)) {
                if (runMode == RunMode.RUN_MODE_SERIAL) {
                    cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start));
                    cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(end));
                    command.setCommandParam(JSONUtils.toJsonString(cmdParam));
                    return processService.createCommand(command);
                } else if (runMode == RunMode.RUN_MODE_PARALLEL) {
                    List<Schedule> schedules = processService.queryReleaseSchedulerListByProcessDefinitionId(processDefineId);
                    List<Date> listDate = new LinkedList<>();
                    if (!CollectionUtils.isEmpty(schedules)) {
                        for (Schedule item : schedules) {
                            listDate.addAll(CronUtils.getSelfFireDateList(start, end, item.getCrontab()));
                        }
                    }
                    if (!CollectionUtils.isEmpty(listDate)) {
                        // one command per schedule fire date
                        for (Date date : listDate) {
                            cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(date));
                            cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(date));
                            command.setCommandParam(JSONUtils.toJsonString(cmdParam));
                            processService.createCommand(command);
                        }
                        return listDate.size();
                    } else {
                        // no online schedule: fall back to one command per day
                        int runCount = 0;
                        while (!start.after(end)) {
                            runCount += 1;
                            cmdParam.put(CMDPARAM_COMPLEMENT_DATA_START_DATE, DateUtils.dateToString(start));
                            cmdParam.put(CMDPARAM_COMPLEMENT_DATA_END_DATE, DateUtils.dateToString(start));
                            command.setCommandParam(JSONUtils.toJsonString(cmdParam));
                            processService.createCommand(command);
                            start = DateUtils.getSomeDay(start, 1);
                        }
                        return runCount;
                    }
                }
            } else {
                logger.error("there is no valid schedule date for the process definition id: {}", processDefineId);
            }
        } else {
            command.setCommandParam(JSONUtils.toJsonString(cmdParam));
            return processService.createCommand(command);
        }

        return 0;
    }
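    // Illustrative call (assumed values): createCommand(CommandType.COMPLEMENT_DATA, 7, null,
    //         FailureStrategy.CONTINUE, null, "2020-01-01 00:00:00,2020-01-03 00:00:00",
    //         WarningType.NONE, 1, 0, RunMode.RUN_MODE_SERIAL, Priority.MEDIUM, "default", null)
    // creates a single command spanning the whole range; with RUN_MODE_PARALLEL and no online
    // schedule it would instead create one command per day (three here) and return 3.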

    /**
     * check result and auth
     */
    private Map<String, Object> checkResultAndAuth(User loginUser, String projectName, Project project) {
        // check project auth; a null return means the check passed
        Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
        Status status = (Status) checkResult.get(Constants.STATUS);
        if (status != Status.SUCCESS) {
            return checkResult;
        }
        return null;
    }

} |
@ -0,0 +1,160 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
|
||||
package org.apache.dolphinscheduler.api.service.impl; |
||||
|
||||
import static org.apache.dolphinscheduler.common.utils.Preconditions.checkNotNull; |
||||
|
||||
import org.apache.dolphinscheduler.api.enums.Status; |
||||
import org.apache.dolphinscheduler.api.service.MonitorService; |
||||
import org.apache.dolphinscheduler.api.utils.ZookeeperMonitor; |
||||
import org.apache.dolphinscheduler.common.Constants; |
||||
import org.apache.dolphinscheduler.common.enums.ZKNodeType; |
||||
import org.apache.dolphinscheduler.common.model.Server; |
||||
import org.apache.dolphinscheduler.common.model.WorkerServerModel; |
||||
import org.apache.dolphinscheduler.dao.MonitorDBDao; |
||||
import org.apache.dolphinscheduler.dao.entity.MonitorRecord; |
||||
import org.apache.dolphinscheduler.dao.entity.User; |
||||
import org.apache.dolphinscheduler.dao.entity.ZookeeperRecord; |
||||
|
||||
import java.util.HashMap; |
||||
import java.util.List; |
||||
import java.util.Map; |
||||
import java.util.function.Function; |
||||
import java.util.stream.Collectors; |
||||
|
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.stereotype.Service; |
||||
|
||||
import com.google.common.collect.Sets; |
||||
|
||||
/** |
||||
* monitor service impl |
||||
*/ |
||||
@Service |
||||
public class MonitorServiceImpl extends BaseServiceImpl implements MonitorService { |
||||
|
||||
@Autowired |
||||
private ZookeeperMonitor zookeeperMonitor; |
||||
|
||||
@Autowired |
||||
private MonitorDBDao monitorDBDao; |
||||
|
||||
/** |
||||
* query database state |
||||
* |
||||
* @param loginUser login user |
||||
* @return data base state |
||||
*/ |
||||
public Map<String,Object> queryDatabaseState(User loginUser) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
|
||||
List<MonitorRecord> monitorRecordList = monitorDBDao.queryDatabaseState(); |
||||
|
||||
result.put(Constants.DATA_LIST, monitorRecordList); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
|
||||
} |
||||
|
||||
/** |
||||
* query master list |
||||
* |
||||
* @param loginUser login user |
||||
* @return master information list |
||||
*/ |
||||
public Map<String,Object> queryMaster(User loginUser) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(); |
||||
|
||||
List<Server> masterServers = getServerListFromZK(true); |
||||
result.put(Constants.DATA_LIST, masterServers); |
||||
putMsg(result,Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* query zookeeper state |
||||
* |
||||
* @param loginUser login user |
||||
* @return zookeeper information list |
||||
*/ |
||||
public Map<String,Object> queryZookeeperState(User loginUser) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
|
||||
List<ZookeeperRecord> zookeeperRecordList = zookeeperMonitor.zookeeperInfoList(); |
||||
|
||||
result.put(Constants.DATA_LIST, zookeeperRecordList); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
|
||||
} |
||||
|
||||
/** |
||||
* query worker list |
||||
* |
||||
* @param loginUser login user |
||||
* @return worker information list |
||||
*/ |
||||
    public Map<String, Object> queryWorker(User loginUser) {

        Map<String, Object> result = new HashMap<>();
        List<WorkerServerModel> workerServers = getServerListFromZK(false)
                .stream()
                .map((Server server) -> {
                    WorkerServerModel model = new WorkerServerModel();
                    model.setId(server.getId());
                    model.setHost(server.getHost());
                    model.setPort(server.getPort());
                    model.setZkDirectories(Sets.newHashSet(server.getZkDirectory()));
                    model.setResInfo(server.getResInfo());
                    model.setCreateTime(server.getCreateTime());
                    model.setLastHeartbeatTime(server.getLastHeartbeatTime());
                    return model;
                })
                .collect(Collectors.toList());

        // key each worker by the last segment of its ZK directory (the worker address);
        // a worker registered under several directories is merged into one model whose
        // zkDirectories set contains all of its paths
        Map<String, WorkerServerModel> workerHostPortServerMapping = workerServers
                .stream()
                .collect(Collectors.toMap(
                    (WorkerServerModel worker) -> {
                        String[] s = worker.getZkDirectories().iterator().next().split("/");
                        return s[s.length - 1];
                    },
                    Function.identity(),
                    (WorkerServerModel oldOne, WorkerServerModel newOne) -> {
                        oldOne.getZkDirectories().addAll(newOne.getZkDirectories());
                        return oldOne;
                    }));

        result.put(Constants.DATA_LIST, workerHostPortServerMapping.values());
        putMsg(result, Status.SUCCESS);

        return result;
    }
||||
|
||||
public List<Server> getServerListFromZK(boolean isMaster) { |
||||
|
||||
checkNotNull(zookeeperMonitor); |
||||
ZKNodeType zkNodeType = isMaster ? ZKNodeType.MASTER : ZKNodeType.WORKER; |
||||
return zookeeperMonitor.getServersList(zkNodeType); |
||||
} |
||||
|
||||
} |
@ -0,0 +1,740 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
|
||||
package org.apache.dolphinscheduler.api.service.impl; |
||||
|
||||
import static org.apache.dolphinscheduler.common.Constants.DATA_LIST; |
||||
import static org.apache.dolphinscheduler.common.Constants.DEPENDENT_SPLIT; |
||||
import static org.apache.dolphinscheduler.common.Constants.GLOBAL_PARAMS; |
||||
import static org.apache.dolphinscheduler.common.Constants.LOCAL_PARAMS; |
||||
import static org.apache.dolphinscheduler.common.Constants.PROCESS_INSTANCE_STATE; |
||||
import static org.apache.dolphinscheduler.common.Constants.TASK_LIST; |
||||
|
||||
import org.apache.dolphinscheduler.api.dto.gantt.GanttDto; |
||||
import org.apache.dolphinscheduler.api.dto.gantt.Task; |
||||
import org.apache.dolphinscheduler.api.enums.Status; |
||||
import org.apache.dolphinscheduler.api.service.ExecutorService; |
||||
import org.apache.dolphinscheduler.api.service.LoggerService; |
||||
import org.apache.dolphinscheduler.api.service.ProcessDefinitionService; |
||||
import org.apache.dolphinscheduler.api.service.ProcessDefinitionVersionService; |
||||
import org.apache.dolphinscheduler.api.service.ProcessInstanceService; |
||||
import org.apache.dolphinscheduler.api.service.ProjectService; |
||||
import org.apache.dolphinscheduler.api.service.UsersService; |
||||
import org.apache.dolphinscheduler.api.utils.PageInfo; |
||||
import org.apache.dolphinscheduler.api.utils.Result; |
||||
import org.apache.dolphinscheduler.common.Constants; |
||||
import org.apache.dolphinscheduler.common.enums.DependResult; |
||||
import org.apache.dolphinscheduler.common.enums.ExecutionStatus; |
||||
import org.apache.dolphinscheduler.common.enums.Flag; |
||||
import org.apache.dolphinscheduler.common.enums.TaskType; |
||||
import org.apache.dolphinscheduler.common.graph.DAG; |
||||
import org.apache.dolphinscheduler.common.model.TaskNode; |
||||
import org.apache.dolphinscheduler.common.model.TaskNodeRelation; |
||||
import org.apache.dolphinscheduler.common.process.ProcessDag; |
||||
import org.apache.dolphinscheduler.common.process.Property; |
||||
import org.apache.dolphinscheduler.common.utils.CollectionUtils; |
||||
import org.apache.dolphinscheduler.common.utils.DateUtils; |
||||
import org.apache.dolphinscheduler.common.utils.JSONUtils; |
||||
import org.apache.dolphinscheduler.common.utils.ParameterUtils; |
||||
import org.apache.dolphinscheduler.common.utils.StringUtils; |
||||
import org.apache.dolphinscheduler.common.utils.placeholder.BusinessTimeUtils; |
||||
import org.apache.dolphinscheduler.dao.entity.ProcessData; |
||||
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; |
||||
import org.apache.dolphinscheduler.dao.entity.ProcessInstance; |
||||
import org.apache.dolphinscheduler.dao.entity.Project; |
||||
import org.apache.dolphinscheduler.dao.entity.TaskInstance; |
||||
import org.apache.dolphinscheduler.dao.entity.Tenant; |
||||
import org.apache.dolphinscheduler.dao.entity.User; |
||||
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; |
||||
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper; |
||||
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; |
||||
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper; |
||||
import org.apache.dolphinscheduler.dao.utils.DagHelper; |
||||
import org.apache.dolphinscheduler.service.process.ProcessService; |
||||
|
||||
import java.io.BufferedReader; |
||||
import java.io.ByteArrayInputStream; |
||||
import java.io.IOException; |
||||
import java.io.InputStreamReader; |
||||
import java.nio.charset.StandardCharsets; |
||||
import java.text.ParseException; |
||||
import java.util.ArrayList; |
||||
import java.util.Collections; |
||||
import java.util.Date; |
||||
import java.util.HashMap; |
||||
import java.util.List; |
||||
import java.util.Map; |
||||
import java.util.Objects; |
||||
import java.util.Optional; |
||||
import java.util.stream.Collectors; |
||||
|
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.stereotype.Service; |
||||
import org.springframework.transaction.annotation.Transactional; |
||||
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage; |
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page; |
||||
|
||||
/** |
||||
* process instance service impl |
||||
*/ |
||||
@Service |
||||
public class ProcessInstanceServiceImpl extends BaseServiceImpl implements ProcessInstanceService { |
||||
|
||||
@Autowired |
||||
ProjectMapper projectMapper; |
||||
|
||||
@Autowired |
||||
ProjectService projectService; |
||||
|
||||
@Autowired |
||||
ProcessService processService; |
||||
|
||||
@Autowired |
||||
ProcessInstanceMapper processInstanceMapper; |
||||
|
||||
@Autowired |
||||
ProcessDefinitionMapper processDefineMapper; |
||||
|
||||
@Autowired |
||||
ProcessDefinitionService processDefinitionService; |
||||
|
||||
@Autowired |
||||
ProcessDefinitionVersionService processDefinitionVersionService; |
||||
|
||||
@Autowired |
||||
ExecutorService execService; |
||||
|
||||
@Autowired |
||||
TaskInstanceMapper taskInstanceMapper; |
||||
|
||||
@Autowired |
||||
LoggerService loggerService; |
||||
|
||||
|
||||
@Autowired |
||||
UsersService usersService; |
||||
|
||||
    /**
     * return the top n longest-running SUCCESS process instances that started between startTime and endTime
     */
||||
public Map<String, Object> queryTopNLongestRunningProcessInstance(User loginUser, String projectName, int size, String startTime, String endTime) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
|
||||
Project project = projectMapper.queryByName(projectName); |
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status resultEnum = (Status) checkResult.get(Constants.STATUS); |
||||
if (resultEnum != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
|
||||
    if (size < 0) {
||||
putMsg(result, Status.NEGTIVE_SIZE_NUMBER_ERROR, size); |
||||
return result; |
||||
} |
||||
if (Objects.isNull(startTime)) { |
||||
putMsg(result, Status.DATA_IS_NULL, Constants.START_TIME); |
||||
return result; |
||||
} |
||||
Date start = DateUtils.stringToDate(startTime); |
||||
if (Objects.isNull(endTime)) { |
||||
putMsg(result, Status.DATA_IS_NULL, Constants.END_TIME); |
||||
return result; |
||||
} |
||||
Date end = DateUtils.stringToDate(endTime); |
||||
if (start == null || end == null) { |
||||
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.START_END_DATE); |
||||
return result; |
||||
} |
||||
if (start.getTime() > end.getTime()) { |
||||
putMsg(result, Status.START_TIME_BIGGER_THAN_END_TIME_ERROR, startTime, endTime); |
||||
return result; |
||||
} |
||||
|
||||
List<ProcessInstance> processInstances = processInstanceMapper.queryTopNProcessInstance(size, start, end, ExecutionStatus.SUCCESS); |
||||
result.put(DATA_LIST, processInstances); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* query process instance by id |
||||
* |
||||
* @param loginUser login user |
||||
* @param projectName project name |
||||
* @param processId process instance id |
||||
* @return process instance detail |
||||
*/ |
||||
public Map<String, Object> queryProcessInstanceById(User loginUser, String projectName, Integer processId) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status resultEnum = (Status) checkResult.get(Constants.STATUS); |
||||
if (resultEnum != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId); |
||||
|
||||
ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId()); |
||||
processInstance.setWarningGroupId(processDefinition.getWarningGroupId()); |
||||
result.put(DATA_LIST, processInstance); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* paging query process instance list, filtering according to project, process definition, time range, keyword, process status |
||||
* |
||||
* @param loginUser login user |
||||
* @param projectName project name |
||||
* @param pageNo page number |
||||
* @param pageSize page size |
||||
* @param processDefineId process definition id |
||||
* @param searchVal search value |
||||
* @param stateType state type |
||||
* @param host host |
||||
* @param startDate start time |
||||
* @param endDate end time |
||||
* @return process instance list |
||||
*/ |
||||
public Map<String, Object> queryProcessInstanceList(User loginUser, String projectName, Integer processDefineId, |
||||
String startDate, String endDate, |
||||
String searchVal, String executorName, ExecutionStatus stateType, String host, |
||||
Integer pageNo, Integer pageSize) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(); |
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status resultEnum = (Status) checkResult.get(Constants.STATUS); |
||||
if (resultEnum != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
|
||||
int[] statusArray = null; |
||||
    // filter by state
||||
if (stateType != null) { |
||||
statusArray = new int[]{stateType.ordinal()}; |
||||
} |
||||
|
||||
Date start = null; |
||||
Date end = null; |
||||
try { |
||||
if (StringUtils.isNotEmpty(startDate)) { |
||||
start = DateUtils.getScheduleDate(startDate); |
||||
} |
||||
if (StringUtils.isNotEmpty(endDate)) { |
||||
end = DateUtils.getScheduleDate(endDate); |
||||
} |
||||
} catch (Exception e) { |
||||
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.START_END_DATE); |
||||
return result; |
||||
} |
||||
|
||||
Page<ProcessInstance> page = new Page<>(pageNo, pageSize); |
||||
PageInfo<ProcessInstance> pageInfo = new PageInfo<>(pageNo, pageSize); |
||||
int executorId = usersService.getUserIdByName(executorName); |
||||
|
||||
IPage<ProcessInstance> processInstanceList = |
||||
processInstanceMapper.queryProcessInstanceListPaging(page, |
||||
project.getId(), processDefineId, searchVal, executorId, statusArray, host, start, end); |
||||
|
||||
List<ProcessInstance> processInstances = processInstanceList.getRecords(); |
||||
|
||||
for (ProcessInstance processInstance : processInstances) { |
||||
processInstance.setDuration(DateUtils.format2Duration(processInstance.getStartTime(), processInstance.getEndTime())); |
||||
User executor = usersService.queryUser(processInstance.getExecutorId()); |
||||
if (null != executor) { |
||||
processInstance.setExecutorName(executor.getUserName()); |
||||
} |
||||
} |
||||
|
||||
pageInfo.setTotalCount((int) processInstanceList.getTotal()); |
||||
pageInfo.setLists(processInstances); |
||||
result.put(DATA_LIST, pageInfo); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* query task list by process instance id |
||||
* |
||||
* @param loginUser login user |
||||
* @param projectName project name |
||||
* @param processId process instance id |
||||
* @return task list for the process instance |
||||
* @throws IOException io exception |
||||
*/ |
||||
public Map<String, Object> queryTaskListByProcessId(User loginUser, String projectName, Integer processId) throws IOException { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status resultEnum = (Status) checkResult.get(Constants.STATUS); |
||||
if (resultEnum != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processId); |
||||
List<TaskInstance> taskInstanceList = processService.findValidTaskListByProcessId(processId); |
||||
addDependResultForTaskList(taskInstanceList); |
||||
Map<String, Object> resultMap = new HashMap<>(); |
||||
resultMap.put(PROCESS_INSTANCE_STATE, processInstance.getState().toString()); |
||||
resultMap.put(TASK_LIST, taskInstanceList); |
||||
result.put(DATA_LIST, resultMap); |
||||
|
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* add dependent result for dependent task |
||||
*/ |
||||
private void addDependResultForTaskList(List<TaskInstance> taskInstanceList) throws IOException { |
||||
for (TaskInstance taskInstance : taskInstanceList) { |
||||
if (taskInstance.getTaskType().equalsIgnoreCase(TaskType.DEPENDENT.toString())) { |
||||
Result<String> logResult = loggerService.queryLog( |
||||
taskInstance.getId(), Constants.LOG_QUERY_SKIP_LINE_NUMBER, Constants.LOG_QUERY_LIMIT); |
||||
if (logResult.getCode() == Status.SUCCESS.ordinal()) { |
||||
String log = logResult.getData(); |
||||
Map<String, DependResult> resultMap = parseLogForDependentResult(log); |
||||
taskInstance.setDependentResult(JSONUtils.toJsonString(resultMap)); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
    public Map<String, DependResult> parseLogForDependentResult(String log) throws IOException {
        Map<String, DependResult> resultMap = new HashMap<>();
        if (StringUtils.isEmpty(log)) {
            return resultMap;
        }

        // use try-with-resources so the reader is always closed
        try (BufferedReader br = new BufferedReader(new InputStreamReader(
                new ByteArrayInputStream(log.getBytes(StandardCharsets.UTF_8)), StandardCharsets.UTF_8))) {
            String line;
            while ((line = br.readLine()) != null) {
                if (line.contains(DEPENDENT_SPLIT)) {
                    String[] tmpStringArray = line.split(":\\|\\|");
                    if (tmpStringArray.length != 2) {
                        continue;
                    }
                    String dependResultString = tmpStringArray[1];
                    String[] dependStringArray = dependResultString.split(",");
                    if (dependStringArray.length != 2) {
                        continue;
                    }
                    String key = dependStringArray[0].trim();
                    DependResult dependResult = DependResult.valueOf(dependStringArray[1].trim());
                    resultMap.put(key, dependResult);
                }
            }
        }
        return resultMap;
    }
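    // Illustrative log line (assumed format; DEPENDENT_SPLIT is expected to be ":||"):
    //   "[INFO] dependent task result :|| task_A,SUCCESS"
    // would be parsed into the map entry {"task_A": DependResult.SUCCESS}.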
||||
|
||||
/** |
||||
* query sub process instance detail info by task id |
||||
* |
||||
* @param loginUser login user |
||||
* @param projectName project name |
||||
* @param taskId task id |
||||
* @return sub process instance detail |
||||
*/ |
||||
public Map<String, Object> querySubProcessInstanceByTaskId(User loginUser, String projectName, Integer taskId) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status resultEnum = (Status) checkResult.get(Constants.STATUS); |
||||
if (resultEnum != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
|
||||
TaskInstance taskInstance = processService.findTaskInstanceById(taskId); |
||||
if (taskInstance == null) { |
||||
putMsg(result, Status.TASK_INSTANCE_NOT_EXISTS, taskId); |
||||
return result; |
||||
} |
||||
if (!taskInstance.isSubProcess()) { |
||||
putMsg(result, Status.TASK_INSTANCE_NOT_SUB_WORKFLOW_INSTANCE, taskInstance.getName()); |
||||
return result; |
||||
} |
||||
|
||||
ProcessInstance subWorkflowInstance = processService.findSubProcessInstance( |
||||
taskInstance.getProcessInstanceId(), taskInstance.getId()); |
||||
if (subWorkflowInstance == null) { |
||||
putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST, taskId); |
||||
return result; |
||||
} |
||||
Map<String, Object> dataMap = new HashMap<>(); |
||||
dataMap.put(Constants.SUBPROCESS_INSTANCE_ID, subWorkflowInstance.getId()); |
||||
result.put(DATA_LIST, dataMap); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* update process instance |
||||
* |
||||
* @param loginUser login user |
||||
* @param projectName project name |
||||
* @param processInstanceJson process instance json |
||||
* @param processInstanceId process instance id |
||||
* @param scheduleTime schedule time |
||||
* @param syncDefine sync define |
||||
* @param flag flag |
||||
* @param locations locations |
||||
* @param connects connects |
||||
* @return update result code |
||||
* @throws ParseException parse exception for json parse |
||||
*/ |
||||
public Map<String, Object> updateProcessInstance(User loginUser, String projectName, Integer processInstanceId, |
||||
String processInstanceJson, String scheduleTime, Boolean syncDefine, |
||||
Flag flag, String locations, String connects) throws ParseException { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
    // check project permission
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status resultEnum = (Status) checkResult.get(Constants.STATUS); |
||||
if (resultEnum != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
|
||||
    // check that the process instance exists
||||
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId); |
||||
if (processInstance == null) { |
||||
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); |
||||
return result; |
||||
} |
||||
|
||||
    // check the process instance status
||||
if (!processInstance.getState().typeIsFinished()) { |
||||
putMsg(result, Status.PROCESS_INSTANCE_STATE_OPERATION_ERROR, |
||||
processInstance.getName(), processInstance.getState().toString(), "update"); |
||||
return result; |
||||
} |
||||
    Date schedule = processInstance.getScheduleTime();
||||
if (scheduleTime != null) { |
||||
schedule = DateUtils.getScheduleDate(scheduleTime); |
||||
} |
||||
processInstance.setScheduleTime(schedule); |
||||
processInstance.setLocations(locations); |
||||
processInstance.setConnects(connects); |
||||
String globalParams = null; |
||||
String originDefParams = null; |
||||
int timeout = processInstance.getTimeout(); |
||||
ProcessDefinition processDefinition = processService.findProcessDefineById(processInstance.getProcessDefinitionId()); |
||||
if (StringUtils.isNotEmpty(processInstanceJson)) { |
||||
ProcessData processData = JSONUtils.parseObject(processInstanceJson, ProcessData.class); |
||||
    // check that the workflow json is valid
||||
Map<String, Object> checkFlowJson = processDefinitionService.checkProcessNodeList(processData, processInstanceJson); |
||||
    if (checkFlowJson.get(Constants.STATUS) != Status.SUCCESS) {
        // return the check result itself so the caller receives the validation failure status
        return checkFlowJson;
||||
} |
||||
|
||||
originDefParams = JSONUtils.toJsonString(processData.getGlobalParams()); |
||||
List<Property> globalParamList = processData.getGlobalParams(); |
||||
Map<String, String> globalParamMap = Optional.ofNullable(globalParamList).orElse(Collections.emptyList()).stream().collect(Collectors.toMap(Property::getProp, Property::getValue)); |
||||
globalParams = ParameterUtils.curingGlobalParams(globalParamMap, globalParamList, |
||||
processInstance.getCmdTypeIfComplement(), schedule); |
||||
timeout = processData.getTimeout(); |
||||
processInstance.setTimeout(timeout); |
||||
Tenant tenant = processService.getTenantForProcess(processData.getTenantId(), |
||||
processDefinition.getUserId()); |
||||
if (tenant != null) { |
||||
processInstance.setTenantCode(tenant.getTenantCode()); |
||||
} |
||||
    // merge with the previous instance JSON before saving, so existing task names and ids are preserved
||||
String oldJson = processInstance.getProcessInstanceJson(); |
||||
if (StringUtils.isNotEmpty(oldJson)) { |
||||
processInstanceJson = processService.changeJson(processData,oldJson); |
||||
} |
||||
processInstance.setProcessInstanceJson(processInstanceJson); |
||||
processInstance.setGlobalParams(globalParams); |
||||
} |
||||
|
||||
int update = processService.updateProcessInstance(processInstance); |
||||
int updateDefine = 1; |
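    // when syncDefine is true, the edited instance JSON is also written back to the
    // process definition below, after a new definition version is recorded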
||||
if (Boolean.TRUE.equals(syncDefine)) { |
||||
processDefinition.setProcessDefinitionJson(processInstanceJson); |
||||
processDefinition.setGlobalParams(originDefParams); |
||||
processDefinition.setLocations(locations); |
||||
processDefinition.setConnects(connects); |
||||
processDefinition.setTimeout(timeout); |
||||
processDefinition.setUpdateTime(new Date()); |
||||
|
||||
// add process definition version
|
||||
long version = processDefinitionVersionService.addProcessDefinitionVersion(processDefinition); |
||||
processDefinition.setVersion(version); |
||||
updateDefine = processDefineMapper.updateById(processDefinition); |
||||
} |
||||
if (update > 0 && updateDefine > 0) { |
||||
putMsg(result, Status.SUCCESS); |
||||
} else { |
||||
putMsg(result, Status.UPDATE_PROCESS_INSTANCE_ERROR); |
||||
} |
||||
|
||||
return result; |
||||
|
||||
} |
||||
|
||||
/** |
||||
* query parent process instance detail info by sub process instance id |
||||
* |
||||
* @param loginUser login user |
||||
* @param projectName project name |
||||
* @param subId sub process id |
||||
* @return parent instance detail |
||||
*/ |
||||
public Map<String, Object> queryParentInstanceBySubId(User loginUser, String projectName, Integer subId) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status resultEnum = (Status) checkResult.get(Constants.STATUS); |
||||
if (resultEnum != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
|
||||
ProcessInstance subInstance = processService.findProcessInstanceDetailById(subId); |
||||
if (subInstance == null) { |
||||
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, subId); |
||||
return result; |
||||
} |
||||
if (subInstance.getIsSubProcess() == Flag.NO) { |
||||
putMsg(result, Status.PROCESS_INSTANCE_NOT_SUB_PROCESS_INSTANCE, subInstance.getName()); |
||||
return result; |
||||
} |
||||
|
||||
ProcessInstance parentWorkflowInstance = processService.findParentProcessInstance(subId); |
||||
if (parentWorkflowInstance == null) { |
||||
putMsg(result, Status.SUB_PROCESS_INSTANCE_NOT_EXIST); |
||||
return result; |
||||
} |
||||
Map<String, Object> dataMap = new HashMap<>(); |
||||
dataMap.put(Constants.PARENT_WORKFLOW_INSTANCE, parentWorkflowInstance.getId()); |
||||
result.put(DATA_LIST, dataMap); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* delete process instance by id, at the same time,delete task instance and their mapping relation data |
||||
* |
||||
* @param loginUser login user |
||||
* @param projectName project name |
||||
* @param processInstanceId process instance id |
||||
* @return delete result code |
||||
*/ |
||||
@Transactional(rollbackFor = RuntimeException.class) |
||||
public Map<String, Object> deleteProcessInstanceById(User loginUser, String projectName, Integer processInstanceId) { |
||||
|
||||
Map<String, Object> result = new HashMap<>(); |
||||
Project project = projectMapper.queryByName(projectName); |
||||
|
||||
Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName); |
||||
Status resultEnum = (Status) checkResult.get(Constants.STATUS); |
||||
if (resultEnum != Status.SUCCESS) { |
||||
return checkResult; |
||||
} |
||||
ProcessInstance processInstance = processService.findProcessInstanceDetailById(processInstanceId); |
||||
if (null == processInstance) { |
||||
putMsg(result, Status.PROCESS_INSTANCE_NOT_EXIST, processInstanceId); |
||||
return result; |
||||
} |
||||
|
||||
processService.removeTaskLogFile(processInstanceId); |
||||
    // cascade the delete through the database
||||
int delete = processService.deleteWorkProcessInstanceById(processInstanceId); |
||||
|
||||
processService.deleteAllSubWorkProcessByParentId(processInstanceId); |
||||
processService.deleteWorkProcessMapByParentId(processInstanceId); |
||||
|
||||
if (delete > 0) { |
||||
putMsg(result, Status.SUCCESS); |
||||
} else { |
||||
putMsg(result, Status.DELETE_PROCESS_INSTANCE_BY_ID_ERROR); |
||||
} |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* view process instance variables |
||||
* |
||||
* @param processInstanceId process instance id |
||||
* @return variables data |
||||
*/ |
||||
public Map<String, Object> viewVariables(Integer processInstanceId) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
|
||||
ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId); |
||||
|
||||
if (processInstance == null) { |
||||
throw new RuntimeException("workflow instance is null"); |
||||
} |
||||
|
||||
Map<String, String> timeParams = BusinessTimeUtils |
||||
.getBusinessTime(processInstance.getCmdTypeIfComplement(), |
||||
processInstance.getScheduleTime()); |
||||
|
||||
String workflowInstanceJson = processInstance.getProcessInstanceJson(); |
||||
|
||||
ProcessData workflowData = JSONUtils.parseObject(workflowInstanceJson, ProcessData.class); |
||||
|
||||
String userDefinedParams = processInstance.getGlobalParams(); |
||||
|
||||
// global params
|
||||
List<Property> globalParams = new ArrayList<>(); |
||||
|
||||
if (userDefinedParams != null && userDefinedParams.length() > 0) { |
||||
globalParams = JSONUtils.toList(userDefinedParams, Property.class); |
||||
} |
||||
|
||||
List<TaskNode> taskNodeList = workflowData.getTasks(); |
||||
|
||||
// global param string
|
||||
String globalParamStr = JSONUtils.toJsonString(globalParams); |
||||
globalParamStr = ParameterUtils.convertParameterPlaceholders(globalParamStr, timeParams); |
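    // Illustrative (assumed upstream placeholder conventions): a global param value of
    // "${system.biz.date}" is substituted here from the business-time map built above,
    // e.g. yielding "20200101" when the schedule time is 2020-01-02.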
||||
globalParams = JSONUtils.toList(globalParamStr, Property.class); |
||||
for (Property property : globalParams) { |
||||
timeParams.put(property.getProp(), property.getValue()); |
||||
} |
||||
|
||||
// local params
|
||||
Map<String, Map<String, Object>> localUserDefParams = new HashMap<>(); |
||||
for (TaskNode taskNode : taskNodeList) { |
||||
String parameter = taskNode.getParams(); |
||||
Map<String, String> map = JSONUtils.toMap(parameter); |
||||
String localParams = map.get(LOCAL_PARAMS); |
||||
if (localParams != null && !localParams.isEmpty()) { |
||||
localParams = ParameterUtils.convertParameterPlaceholders(localParams, timeParams); |
||||
List<Property> localParamsList = JSONUtils.toList(localParams, Property.class); |
||||
|
||||
Map<String, Object> localParamsMap = new HashMap<>(); |
||||
localParamsMap.put(Constants.TASK_TYPE, taskNode.getType()); |
||||
localParamsMap.put(Constants.LOCAL_PARAMS_LIST, localParamsList); |
||||
if (CollectionUtils.isNotEmpty(localParamsList)) { |
||||
localUserDefParams.put(taskNode.getName(), localParamsMap); |
||||
} |
||||
} |
||||
|
||||
} |
||||
|
||||
Map<String, Object> resultMap = new HashMap<>(); |
||||
|
||||
resultMap.put(GLOBAL_PARAMS, globalParams); |
||||
resultMap.put(LOCAL_PARAMS, localUserDefParams); |
||||
|
||||
result.put(DATA_LIST, resultMap); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* encapsulation gantt structure |
||||
* |
||||
* @param processInstanceId process instance id |
||||
* @return gantt tree data |
||||
* @throws Exception exception when json parse |
||||
*/ |
||||
public Map<String, Object> viewGantt(Integer processInstanceId) throws Exception { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
|
||||
ProcessInstance processInstance = processInstanceMapper.queryDetailById(processInstanceId); |
||||
|
||||
if (processInstance == null) { |
||||
throw new RuntimeException("workflow instance is null"); |
||||
} |
||||
|
||||
GanttDto ganttDto = new GanttDto(); |
||||
|
||||
DAG<String, TaskNode, TaskNodeRelation> dag = processInstance2DAG(processInstance); |
||||
    // topological sort
||||
List<String> nodeList = dag.topologicalSort(); |
||||
|
||||
ganttDto.setTaskNames(nodeList); |
||||
|
||||
List<Task> taskList = new ArrayList<>(); |
||||
for (String node : nodeList) { |
||||
TaskInstance taskInstance = taskInstanceMapper.queryByInstanceIdAndName(processInstanceId, node); |
||||
if (taskInstance == null) { |
||||
continue; |
||||
} |
||||
Date startTime = taskInstance.getStartTime() == null ? new Date() : taskInstance.getStartTime(); |
||||
Date endTime = taskInstance.getEndTime() == null ? new Date() : taskInstance.getEndTime(); |
||||
Task task = new Task(); |
||||
task.setTaskName(taskInstance.getName()); |
||||
task.getStartDate().add(startTime.getTime()); |
||||
task.getEndDate().add(endTime.getTime()); |
||||
task.setIsoStart(startTime); |
||||
task.setIsoEnd(endTime); |
||||
task.setStatus(taskInstance.getState().toString()); |
||||
task.setExecutionDate(taskInstance.getStartTime()); |
||||
task.setDuration(DateUtils.format2Readable(endTime.getTime() - startTime.getTime())); |
||||
taskList.add(task); |
||||
} |
||||
ganttDto.setTasks(taskList); |
||||
|
||||
result.put(DATA_LIST, ganttDto); |
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* process instance to DAG |
||||
* |
||||
* @param processInstance input process instance |
||||
* @return process instance dag. |
||||
*/ |
||||
private static DAG<String, TaskNode, TaskNodeRelation> processInstance2DAG(ProcessInstance processInstance) { |
||||
|
||||
String processDefinitionJson = processInstance.getProcessInstanceJson(); |
||||
|
||||
ProcessData processData = JSONUtils.parseObject(processDefinitionJson, ProcessData.class); |
||||
|
||||
List<TaskNode> taskNodeList = processData.getTasks(); |
||||
|
||||
ProcessDag processDag = DagHelper.getProcessDag(taskNodeList); |
||||
|
||||
return DagHelper.buildDagGraph(processDag); |
||||
} |
||||
|
||||
/** |
||||
* query process instance by processDefinitionId and stateArray |
||||
* @param processDefinitionId processDefinitionId |
||||
* @param states states array |
||||
* @return process instance list |
||||
*/ |
||||
public List<ProcessInstance> queryByProcessDefineIdAndStatus(int processDefinitionId, int[] states) { |
||||
return processInstanceMapper.queryByProcessDefineIdAndStatus(processDefinitionId, states); |
||||
} |
||||
|
||||
/** |
||||
* query process instance by processDefinitionId |
||||
* @param processDefinitionId processDefinitionId |
||||
* @param size size |
||||
* @return process instance list |
||||
*/ |
||||
public List<ProcessInstance> queryByProcessDefineId(int processDefinitionId,int size) { |
||||
return processInstanceMapper.queryByProcessDefineId(processDefinitionId, size); |
||||
} |
||||
|
||||
} |
@ -0,0 +1,294 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
|
||||
package org.apache.dolphinscheduler.api.service.impl; |
||||
|
||||
import org.apache.dolphinscheduler.api.enums.Status; |
||||
import org.apache.dolphinscheduler.api.service.QueueService; |
||||
import org.apache.dolphinscheduler.api.utils.PageInfo; |
||||
import org.apache.dolphinscheduler.api.utils.Result; |
||||
import org.apache.dolphinscheduler.common.Constants; |
||||
import org.apache.dolphinscheduler.common.utils.CollectionUtils; |
||||
import org.apache.dolphinscheduler.common.utils.StringUtils; |
||||
import org.apache.dolphinscheduler.dao.entity.Queue; |
||||
import org.apache.dolphinscheduler.dao.entity.User; |
||||
import org.apache.dolphinscheduler.dao.mapper.QueueMapper; |
||||
import org.apache.dolphinscheduler.dao.mapper.UserMapper; |
||||
|
||||
import java.util.Date; |
||||
import java.util.HashMap; |
||||
import java.util.List; |
||||
import java.util.Map; |
||||
|
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.stereotype.Service; |
||||
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage; |
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page; |
||||
|
||||
/** |
||||
* queue service impl |
||||
*/ |
||||
@Service |
||||
public class QueueServiceImpl extends BaseServiceImpl implements QueueService { |
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(QueueServiceImpl.class); |
||||
|
||||
@Autowired |
||||
private QueueMapper queueMapper; |
||||
|
||||
@Autowired |
||||
private UserMapper userMapper; |
||||
|
||||
/** |
||||
* query queue list |
||||
* |
||||
* @param loginUser login user |
||||
* @return queue list |
||||
*/ |
||||
public Map<String, Object> queryList(User loginUser) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
if (isNotAdmin(loginUser, result)) { |
||||
return result; |
||||
} |
||||
|
||||
List<Queue> queueList = queueMapper.selectList(null); |
||||
result.put(Constants.DATA_LIST, queueList); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* query queue list paging |
||||
* |
||||
* @param loginUser login user |
||||
* @param pageNo page number |
||||
* @param searchVal search value |
||||
* @param pageSize page size |
||||
* @return queue list |
||||
*/ |
||||
public Map<String, Object> queryList(User loginUser, String searchVal, Integer pageNo, Integer pageSize) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
if (isNotAdmin(loginUser, result)) { |
||||
return result; |
||||
} |
||||
|
||||
Page<Queue> page = new Page<>(pageNo, pageSize); |
||||
|
||||
IPage<Queue> queueList = queueMapper.queryQueuePaging(page, searchVal); |
||||
|
||||
Integer count = (int) queueList.getTotal(); |
||||
PageInfo<Queue> pageInfo = new PageInfo<>(pageNo, pageSize); |
||||
pageInfo.setTotalCount(count); |
||||
pageInfo.setLists(queueList.getRecords()); |
||||
result.put(Constants.DATA_LIST, pageInfo); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* create queue |
||||
* |
||||
* @param loginUser login user |
||||
* @param queue queue |
||||
* @param queueName queue name |
||||
* @return create result |
||||
*/ |
||||
public Map<String, Object> createQueue(User loginUser, String queue, String queueName) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
if (isNotAdmin(loginUser, result)) { |
||||
return result; |
||||
} |
||||
|
||||
if (StringUtils.isEmpty(queue)) { |
||||
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.QUEUE); |
||||
return result; |
||||
} |
||||
|
||||
if (StringUtils.isEmpty(queueName)) { |
||||
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.QUEUE_NAME); |
||||
return result; |
||||
} |
||||
|
||||
if (checkQueueNameExist(queueName)) { |
||||
putMsg(result, Status.QUEUE_NAME_EXIST, queueName); |
||||
return result; |
||||
} |
||||
|
||||
if (checkQueueExist(queue)) { |
||||
putMsg(result, Status.QUEUE_VALUE_EXIST, queue); |
||||
return result; |
||||
} |
||||
|
||||
Queue queueObj = new Queue(); |
||||
Date now = new Date(); |
||||
|
||||
queueObj.setQueue(queue); |
||||
queueObj.setQueueName(queueName); |
||||
queueObj.setCreateTime(now); |
||||
queueObj.setUpdateTime(now); |
||||
|
||||
queueMapper.insert(queueObj); |
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* update queue |
||||
* |
||||
* @param loginUser login user |
||||
* @param queue queue |
||||
* @param id queue id |
||||
* @param queueName queue name |
||||
* @return update result code |
||||
*/ |
||||
public Map<String, Object> updateQueue(User loginUser, int id, String queue, String queueName) { |
||||
Map<String, Object> result = new HashMap<>(); |
||||
if (isNotAdmin(loginUser, result)) { |
||||
return result; |
||||
} |
||||
|
||||
if (StringUtils.isEmpty(queue)) { |
||||
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.QUEUE); |
||||
return result; |
||||
} |
||||
|
||||
if (StringUtils.isEmpty(queueName)) { |
||||
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.QUEUE_NAME); |
||||
return result; |
||||
} |
||||
|
||||
Queue queueObj = queueMapper.selectById(id); |
||||
if (queueObj == null) { |
||||
putMsg(result, Status.QUEUE_NOT_EXIST, id); |
||||
return result; |
||||
} |
||||
|
||||
    // check whether the queue value or queue name actually changed
||||
if (queue.equals(queueObj.getQueue()) && queueName.equals(queueObj.getQueueName())) { |
||||
putMsg(result, Status.NEED_NOT_UPDATE_QUEUE); |
||||
return result; |
||||
} |
||||
|
||||
    // check whether the new queue name already exists
||||
if (!queueName.equals(queueObj.getQueueName()) |
||||
&& checkQueueNameExist(queueName)) { |
||||
putMsg(result, Status.QUEUE_NAME_EXIST, queueName); |
||||
return result; |
||||
} |
||||
|
||||
    // check whether the new queue value already exists
||||
if (!queue.equals(queueObj.getQueue()) && checkQueueExist(queue)) { |
||||
putMsg(result, Status.QUEUE_VALUE_EXIST, queue); |
||||
return result; |
||||
} |
||||
|
||||
    // if the old queue is still referenced by any user, migrate those users to the new queue
    if (checkIfQueueIsInUsing(queueObj.getQueueName(), queueName)) {
        // update users that still reference the old queue
        Integer relatedUserNums = userMapper.updateUserQueue(queueObj.getQueueName(), queueName);
        logger.info("the old queue is referenced by {} user(s); they were updated to the new queue.", relatedUserNums);
||||
} |
||||
|
||||
// update queue
|
||||
Date now = new Date(); |
||||
queueObj.setQueue(queue); |
||||
queueObj.setQueueName(queueName); |
||||
queueObj.setUpdateTime(now); |
||||
|
||||
queueMapper.updateById(queueObj); |
||||
|
||||
putMsg(result, Status.SUCCESS); |
||||
|
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
* verify queue and queueName |
||||
* |
||||
* @param queue queue |
||||
* @param queueName queue name |
||||
    * @return SUCCESS when neither the queue value nor the queue name exists, otherwise the corresponding error status
||||
*/ |
||||
public Result<Object> verifyQueue(String queue, String queueName) { |
||||
Result<Object> result = new Result<>(); |
||||
|
||||
if (StringUtils.isEmpty(queue)) { |
||||
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.QUEUE); |
||||
return result; |
||||
} |
||||
|
||||
if (StringUtils.isEmpty(queueName)) { |
||||
putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, Constants.QUEUE_NAME); |
||||
return result; |
||||
} |
||||
|
||||
if (checkQueueNameExist(queueName)) { |
||||
putMsg(result, Status.QUEUE_NAME_EXIST, queueName); |
||||
return result; |
||||
} |
||||
|
||||
if (checkQueueExist(queue)) { |
||||
putMsg(result, Status.QUEUE_VALUE_EXIST, queue); |
||||
return result; |
||||
} |
||||
|
||||
putMsg(result, Status.SUCCESS); |
||||
return result; |
||||
} |
||||
|
||||
/** |
||||
    * check whether the queue value already exists
    *
    * @param queue queue value
    * @return true if the queue exists, otherwise false
||||
*/ |
||||
private boolean checkQueueExist(String queue) { |
||||
return CollectionUtils.isNotEmpty(queueMapper.queryAllQueueList(queue, null)); |
||||
} |
||||
|
||||
/** |
||||
    * check whether the queue name already exists
    *
    * @param queueName queue name
    * @return true if the queue name exists, otherwise false
||||
*/ |
||||
private boolean checkQueueNameExist(String queueName) { |
||||
return CollectionUtils.isNotEmpty(queueMapper.queryAllQueueList(null, queueName)); |
||||
} |
||||
|
||||
/** |
||||
    * check whether the old queue name is still referenced by any user,
    * i.e. whether users need to be migrated when the queue is renamed
||||
* |
||||
* @param oldQueue old queue name |
||||
* @param newQueue new queue name |
||||
* @return true if need to update user |
||||
*/ |
||||
    private boolean checkIfQueueIsInUsing(String oldQueue, String newQueue) {
||||
return !oldQueue.equals(newQueue) && CollectionUtils.isNotEmpty(userMapper.queryUserListByQueue(oldQueue)); |
||||
} |
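    // e.g. renaming queue "default" to "etl" while some user still references "default"
    // returns true here, which makes updateQueue above migrate those users via
    // userMapper.updateUserQueue(...)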
||||
|
||||
} |
File diff suppressed because it is too large
@ -0,0 +1,599 @@
|
||||
/* |
||||
* Licensed to the Apache Software Foundation (ASF) under one or more |
||||
* contributor license agreements. See the NOTICE file distributed with |
||||
* this work for additional information regarding copyright ownership. |
||||
* The ASF licenses this file to You under the Apache License, Version 2.0 |
||||
* (the "License"); you may not use this file except in compliance with |
||||
* the License. You may obtain a copy of the License at |
||||
* |
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
* |
||||
* Unless required by applicable law or agreed to in writing, software |
||||
* distributed under the License is distributed on an "AS IS" BASIS, |
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
||||
* See the License for the specific language governing permissions and |
||||
* limitations under the License. |
||||
*/ |
||||
|
||||
package org.apache.dolphinscheduler.api.service.impl; |
||||
|
||||
import org.apache.dolphinscheduler.api.dto.ScheduleParam; |
||||
import org.apache.dolphinscheduler.api.enums.Status; |
||||
import org.apache.dolphinscheduler.api.exceptions.ServiceException; |
||||
import org.apache.dolphinscheduler.api.service.ExecutorService; |
||||
import org.apache.dolphinscheduler.api.service.MonitorService; |
||||
import org.apache.dolphinscheduler.api.service.ProjectService; |
||||
import org.apache.dolphinscheduler.api.service.SchedulerService; |
||||
import org.apache.dolphinscheduler.api.utils.PageInfo; |
||||
import org.apache.dolphinscheduler.common.Constants; |
||||
import org.apache.dolphinscheduler.common.enums.FailureStrategy; |
||||
import org.apache.dolphinscheduler.common.enums.Priority; |
||||
import org.apache.dolphinscheduler.common.enums.ReleaseState; |
||||
import org.apache.dolphinscheduler.common.enums.UserType; |
||||
import org.apache.dolphinscheduler.common.enums.WarningType; |
||||
import org.apache.dolphinscheduler.common.model.Server; |
||||
import org.apache.dolphinscheduler.common.utils.DateUtils; |
||||
import org.apache.dolphinscheduler.common.utils.JSONUtils; |
||||
import org.apache.dolphinscheduler.common.utils.StringUtils; |
||||
import org.apache.dolphinscheduler.dao.entity.ProcessDefinition; |
||||
import org.apache.dolphinscheduler.dao.entity.Project; |
||||
import org.apache.dolphinscheduler.dao.entity.Schedule; |
||||
import org.apache.dolphinscheduler.dao.entity.User; |
||||
import org.apache.dolphinscheduler.dao.mapper.ProcessDefinitionMapper; |
||||
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper; |
||||
import org.apache.dolphinscheduler.dao.mapper.ScheduleMapper; |
||||
import org.apache.dolphinscheduler.service.process.ProcessService; |
||||
import org.apache.dolphinscheduler.service.quartz.ProcessScheduleJob; |
||||
import org.apache.dolphinscheduler.service.quartz.QuartzExecutors; |
||||
import org.apache.dolphinscheduler.service.quartz.cron.CronUtils; |
||||
|
||||
import java.text.ParseException; |
||||
import java.util.ArrayList; |
||||
import java.util.Date; |
||||
import java.util.HashMap; |
||||
import java.util.List; |
||||
import java.util.Map; |
||||
|
||||
import org.quartz.CronExpression; |
||||
import org.slf4j.Logger; |
||||
import org.slf4j.LoggerFactory; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.stereotype.Service; |
||||
import org.springframework.transaction.annotation.Transactional; |
||||
|
||||
import com.baomidou.mybatisplus.core.metadata.IPage; |
||||
import com.baomidou.mybatisplus.extension.plugins.pagination.Page; |
||||
|
||||

/**
 * scheduler service impl
 */
@Service
public class SchedulerServiceImpl extends BaseServiceImpl implements SchedulerService {

    private static final Logger logger = LoggerFactory.getLogger(SchedulerServiceImpl.class);

    @Autowired
    private ProjectService projectService;

    @Autowired
    private ExecutorService executorService;

    @Autowired
    private MonitorService monitorService;

    @Autowired
    private ProcessService processService;

    @Autowired
    private ScheduleMapper scheduleMapper;

    @Autowired
    private ProjectMapper projectMapper;

    @Autowired
    private ProcessDefinitionMapper processDefinitionMapper;

    /**
     * save schedule
     *
     * @param loginUser login user
     * @param projectName project name
     * @param processDefineId process definition id
     * @param schedule schedule expression (JSON with start time, end time and crontab)
     * @param warningType warning type
     * @param warningGroupId warning group id
     * @param failureStrategy failure strategy
     * @param processInstancePriority process instance priority
     * @param workerGroup worker group
     * @return create result code
     */
    @Transactional(rollbackFor = RuntimeException.class)
    public Map<String, Object> insertSchedule(User loginUser, String projectName,
                                              Integer processDefineId,
                                              String schedule,
                                              WarningType warningType,
                                              int warningGroupId,
                                              FailureStrategy failureStrategy,
                                              Priority processInstancePriority,
                                              String workerGroup) {

        Map<String, Object> result = new HashMap<>();

        Project project = projectMapper.queryByName(projectName);

        // check project auth
        boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result);
        if (!hasProjectAndPerm) {
            return result;
        }

        // check workflow definition release state
        ProcessDefinition processDefinition = processService.findProcessDefineById(processDefineId);
        result = executorService.checkProcessDefinitionValid(processDefinition, processDefineId);
        if (result.get(Constants.STATUS) != Status.SUCCESS) {
            return result;
        }

        Schedule scheduleObj = new Schedule();
        Date now = new Date();

        scheduleObj.setProjectName(projectName);
        scheduleObj.setProcessDefinitionId(processDefinition.getId());
        scheduleObj.setProcessDefinitionName(processDefinition.getName());

        ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class);
        if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) {
            logger.warn("The start time must not be the same as the end time");
            putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME);
            return result;
        }
        scheduleObj.setStartTime(scheduleParam.getStartTime());
        scheduleObj.setEndTime(scheduleParam.getEndTime());
        if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) {
            logger.error("crontab {} failed validation", scheduleParam.getCrontab());

            putMsg(result, Status.REQUEST_PARAMS_NOT_VALID_ERROR, scheduleParam.getCrontab());
            return result;
        }
        scheduleObj.setCrontab(scheduleParam.getCrontab());
        scheduleObj.setWarningType(warningType);
        scheduleObj.setWarningGroupId(warningGroupId);
        scheduleObj.setFailureStrategy(failureStrategy);
        scheduleObj.setCreateTime(now);
        scheduleObj.setUpdateTime(now);
        scheduleObj.setUserId(loginUser.getId());
        scheduleObj.setUserName(loginUser.getUserName());
        scheduleObj.setReleaseState(ReleaseState.OFFLINE);
        scheduleObj.setProcessInstancePriority(processInstancePriority);
        scheduleObj.setWorkerGroup(workerGroup);
        scheduleMapper.insert(scheduleObj);

        // update the warning group of the process definition by process definition id
        processDefinition.setWarningGroupId(warningGroupId);
        processDefinitionMapper.updateById(processDefinition);

        // return schedule object with ID
        result.put(Constants.DATA_LIST, scheduleMapper.selectById(scheduleObj.getId()));
        putMsg(result, Status.SUCCESS);

        result.put("scheduleId", scheduleObj.getId());
        return result;
    }
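
    // Illustrative note (not part of this change): the `schedule` argument is expected to be a
    // JSON string that JSONUtils.parseObject can map onto ScheduleParam; assuming the usual
    // startTime/endTime/crontab field names, a hypothetical payload would be:
    //
    //   {
    //     "startTime": "2021-01-01 00:00:00",
    //     "endTime": "2022-01-01 00:00:00",
    //     "crontab": "0 0 5 * * ? *"
    //   }
    //
    // The crontab value must be a valid Quartz cron expression, which is exactly what the
    // org.quartz.CronExpression.isValidExpression check above enforces.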

    /**
     * update schedule
     *
     * @param loginUser login user
     * @param projectName project name
     * @param id scheduler id
     * @param scheduleExpression schedule expression (JSON)
     * @param warningType warning type
     * @param warningGroupId warning group id
     * @param failureStrategy failure strategy
     * @param workerGroup worker group
     * @param processInstancePriority process instance priority
     * @param scheduleStatus schedule status
     * @return update result code
     */
    @Transactional(rollbackFor = RuntimeException.class)
    public Map<String, Object> updateSchedule(User loginUser,
                                              String projectName,
                                              Integer id,
                                              String scheduleExpression,
                                              WarningType warningType,
                                              int warningGroupId,
                                              FailureStrategy failureStrategy,
                                              ReleaseState scheduleStatus,
                                              Priority processInstancePriority,
                                              String workerGroup) {
        Map<String, Object> result = new HashMap<>();

        Project project = projectMapper.queryByName(projectName);

        // check project auth
        boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result);
        if (!hasProjectAndPerm) {
            return result;
        }

        // check schedule exists
        Schedule schedule = scheduleMapper.selectById(id);

        if (schedule == null) {
            putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id);
            return result;
        }

        ProcessDefinition processDefinition = processService.findProcessDefineById(schedule.getProcessDefinitionId());
        if (processDefinition == null) {
            putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, schedule.getProcessDefinitionId());
            return result;
        }

        // an online schedule must not be modified
        if (checkValid(result, schedule.getReleaseState() == ReleaseState.ONLINE, Status.SCHEDULE_CRON_ONLINE_FORBID_UPDATE)) {
            return result;
        }

        Date now = new Date();

        // update schedule params
        if (StringUtils.isNotEmpty(scheduleExpression)) {
            ScheduleParam scheduleParam = JSONUtils.parseObject(scheduleExpression, ScheduleParam.class);
            if (DateUtils.differSec(scheduleParam.getStartTime(), scheduleParam.getEndTime()) == 0) {
                logger.warn("The start time must not be the same as the end time");
                putMsg(result, Status.SCHEDULE_START_TIME_END_TIME_SAME);
                return result;
            }
            schedule.setStartTime(scheduleParam.getStartTime());
            schedule.setEndTime(scheduleParam.getEndTime());
            if (!org.quartz.CronExpression.isValidExpression(scheduleParam.getCrontab())) {
                putMsg(result, Status.SCHEDULE_CRON_CHECK_FAILED, scheduleParam.getCrontab());
                return result;
            }
            schedule.setCrontab(scheduleParam.getCrontab());
        }

        if (warningType != null) {
            schedule.setWarningType(warningType);
        }

        schedule.setWarningGroupId(warningGroupId);

        if (failureStrategy != null) {
            schedule.setFailureStrategy(failureStrategy);
        }

        if (scheduleStatus != null) {
            schedule.setReleaseState(scheduleStatus);
        }
        schedule.setWorkerGroup(workerGroup);
        schedule.setUpdateTime(now);
        schedule.setProcessInstancePriority(processInstancePriority);
        scheduleMapper.updateById(schedule);

        // update the warning group of the process definition by process definition id
        processDefinition.setWarningGroupId(warningGroupId);

        processDefinitionMapper.updateById(processDefinition);

        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * set schedule online or offline
     *
     * @param loginUser login user
     * @param projectName project name
     * @param id scheduler id
     * @param scheduleStatus schedule status
     * @return publish result code
     */
    @Transactional(rollbackFor = RuntimeException.class)
    public Map<String, Object> setScheduleState(User loginUser,
                                                String projectName,
                                                Integer id,
                                                ReleaseState scheduleStatus) {
        Map<String, Object> result = new HashMap<>();

        Project project = projectMapper.queryByName(projectName);
        // check project auth
        boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result);
        if (!hasProjectAndPerm) {
            return result;
        }

        // check schedule exists
        Schedule scheduleObj = scheduleMapper.selectById(id);

        if (scheduleObj == null) {
            putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, id);
            return result;
        }
        // check schedule release state
        if (scheduleObj.getReleaseState() == scheduleStatus) {
            logger.info("schedule release state is already {}, no need to change schedule id: {} from {} to {}",
                    scheduleObj.getReleaseState(), scheduleObj.getId(), scheduleObj.getReleaseState(), scheduleStatus);
            putMsg(result, Status.SCHEDULE_CRON_REALEASE_NEED_NOT_CHANGE, scheduleStatus);
            return result;
        }
        ProcessDefinition processDefinition = processService.findProcessDefineById(scheduleObj.getProcessDefinitionId());
        if (processDefinition == null) {
            putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, scheduleObj.getProcessDefinitionId());
            return result;
        }

        if (scheduleStatus == ReleaseState.ONLINE) {
            // check process definition release state
            if (processDefinition.getReleaseState() != ReleaseState.ONLINE) {
                logger.info("process definition not released, id: {}, name: {}",
                        processDefinition.getId(), processDefinition.getName());
                putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, processDefinition.getName());
                return result;
            }
            // check sub process definition release state
            List<Integer> subProcessDefineIds = new ArrayList<>();
            processService.recurseFindSubProcessId(scheduleObj.getProcessDefinitionId(), subProcessDefineIds);
            Integer[] idArray = subProcessDefineIds.toArray(new Integer[subProcessDefineIds.size()]);
            if (!subProcessDefineIds.isEmpty()) {
                List<ProcessDefinition> subProcessDefinitionList =
                        processDefinitionMapper.queryDefinitionListByIdList(idArray);
                if (subProcessDefinitionList != null && !subProcessDefinitionList.isEmpty()) {
                    for (ProcessDefinition subProcessDefinition : subProcessDefinitionList) {
                        // if any sub process definition is not online, return directly
                        if (subProcessDefinition.getReleaseState() != ReleaseState.ONLINE) {
                            logger.info("process definition not released, id: {}, name: {}",
                                    subProcessDefinition.getId(), subProcessDefinition.getName());
                            putMsg(result, Status.PROCESS_DEFINE_NOT_RELEASE, subProcessDefinition.getId());
                            return result;
                        }
                    }
                }
            }
        }

        // check master server exists
        List<Server> masterServers = monitorService.getServerListFromZK(true);

        if (masterServers.isEmpty()) {
            putMsg(result, Status.MASTER_NOT_EXISTS);
            return result;
        }

        // set status
        scheduleObj.setReleaseState(scheduleStatus);

        scheduleMapper.updateById(scheduleObj);

        try {
            switch (scheduleStatus) {
                case ONLINE:
                    logger.info("Call master client set schedule online, project id: {}, flow id: {}, host: {}", project.getId(), processDefinition.getId(), masterServers);
                    setSchedule(project.getId(), scheduleObj);
                    break;
                case OFFLINE:
                    logger.info("Call master client set schedule offline, project id: {}, flow id: {}, host: {}", project.getId(), processDefinition.getId(), masterServers);
                    deleteSchedule(project.getId(), id);
                    break;
                default:
                    putMsg(result, Status.SCHEDULE_STATUS_UNKNOWN, scheduleStatus.toString());
                    return result;
            }
        } catch (Exception e) {
            result.put(Constants.MSG, scheduleStatus == ReleaseState.ONLINE ? "set online failure" : "set offline failure");
            throw new ServiceException(result.get(Constants.MSG).toString());
        }

        putMsg(result, Status.SUCCESS);
        return result;
    }
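
    // Illustrative note (not part of this change): bringing a schedule online only succeeds
    // when the workflow definition and all of its sub process definitions are ONLINE and at
    // least one master server is registered in ZooKeeper, e.g. (hypothetical call):
    //
    //   Map<String, Object> res = schedulerService.setScheduleState(loginUser, "demo-project", 10, ReleaseState.ONLINE);
    //
    // On success the schedule is persisted as ONLINE and a Quartz job is registered via
    // setSchedule(...); OFFLINE takes the inverse path through deleteSchedule(...).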

    /**
     * query schedule
     *
     * @param loginUser login user
     * @param projectName project name
     * @param processDefineId process definition id
     * @param pageNo page number
     * @param pageSize page size
     * @param searchVal search value
     * @return schedule list page
     */
    public Map<String, Object> querySchedule(User loginUser, String projectName, Integer processDefineId, String searchVal, Integer pageNo, Integer pageSize) {

        HashMap<String, Object> result = new HashMap<>();

        Project project = projectMapper.queryByName(projectName);

        // check project auth
        boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result);
        if (!hasProjectAndPerm) {
            return result;
        }

        ProcessDefinition processDefinition = processService.findProcessDefineById(processDefineId);
        if (processDefinition == null) {
            putMsg(result, Status.PROCESS_DEFINE_NOT_EXIST, processDefineId);
            return result;
        }
        Page<Schedule> page = new Page<>(pageNo, pageSize);
        IPage<Schedule> scheduleIPage = scheduleMapper.queryByProcessDefineIdPaging(
                page, processDefineId, searchVal
        );

        PageInfo<Schedule> pageInfo = new PageInfo<>(pageNo, pageSize);
        pageInfo.setTotalCount((int) scheduleIPage.getTotal());
        pageInfo.setLists(scheduleIPage.getRecords());
        result.put(Constants.DATA_LIST, pageInfo);
        putMsg(result, Status.SUCCESS);

        return result;
    }

    /**
     * query schedule list
     *
     * @param loginUser login user
     * @param projectName project name
     * @return schedule list
     */
    public Map<String, Object> queryScheduleList(User loginUser, String projectName) {
        Map<String, Object> result = new HashMap<>();
        Project project = projectMapper.queryByName(projectName);

        // check project auth
        boolean hasProjectAndPerm = projectService.hasProjectAndPerm(loginUser, project, result);
        if (!hasProjectAndPerm) {
            return result;
        }

        List<Schedule> schedules = scheduleMapper.querySchedulerListByProjectName(projectName);

        result.put(Constants.DATA_LIST, schedules);
        putMsg(result, Status.SUCCESS);

        return result;
    }

    public void setSchedule(int projectId, Schedule schedule) {
        int scheduleId = schedule.getId();
        logger.info("set schedule, project id: {}, scheduleId: {}", projectId, scheduleId);

        Date startDate = schedule.getStartTime();
        Date endDate = schedule.getEndTime();

        String jobName = QuartzExecutors.buildJobName(scheduleId);
        String jobGroupName = QuartzExecutors.buildJobGroupName(projectId);

        Map<String, Object> dataMap = QuartzExecutors.buildDataMap(projectId, scheduleId, schedule);

        QuartzExecutors.getInstance().addJob(ProcessScheduleJob.class, jobName, jobGroupName, startDate, endDate,
                schedule.getCrontab(), dataMap);

    }
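
    // Illustrative note (not part of this change): each online schedule is mirrored by exactly
    // one Quartz job. Assuming a "job_<scheduleId>" / "jobgroup_<projectId>" naming convention
    // in QuartzExecutors.buildJobName/buildJobGroupName (an assumption about that utility), a
    // schedule with id 10 in project 3 would be registered roughly as:
    //
    //   addJob(ProcessScheduleJob.class, "job_10", "jobgroup_3", startDate, endDate, crontab, dataMap);
    //
    // The exact name format is an implementation detail of QuartzExecutors.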

    /**
     * delete schedule
     *
     * @param projectId project id
     * @param scheduleId schedule id
     * @throws RuntimeException runtime exception
     */
    public void deleteSchedule(int projectId, int scheduleId) {
        logger.info("delete schedules of project id: {}, schedule id: {}", projectId, scheduleId);

        String jobName = QuartzExecutors.buildJobName(scheduleId);
        String jobGroupName = QuartzExecutors.buildJobGroupName(projectId);

        if (!QuartzExecutors.getInstance().deleteJob(jobName, jobGroupName)) {
            logger.warn("set offline failure: projectId: {}, scheduleId: {}", projectId, scheduleId);
            throw new ServiceException("set offline failure");
        }

    }

    /**
     * check valid
     *
     * @param result result
     * @param bool bool
     * @param status status
     * @return check result code
     */
    private boolean checkValid(Map<String, Object> result, boolean bool, Status status) {
        // if the condition holds, set the given error status and signal the caller to return
        if (bool) {
            putMsg(result, status);
            return true;
        }
        return false;
    }

    /**
     * delete schedule by id
     *
     * @param loginUser login user
     * @param projectName project name
     * @param scheduleId schedule id
     * @return delete result code
     */
    public Map<String, Object> deleteScheduleById(User loginUser, String projectName, Integer scheduleId) {

        Map<String, Object> result = new HashMap<>();
        Project project = projectMapper.queryByName(projectName);

        Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
        Status resultEnum = (Status) checkResult.get(Constants.STATUS);
        if (resultEnum != Status.SUCCESS) {
            return checkResult;
        }

        Schedule schedule = scheduleMapper.selectById(scheduleId);

        if (schedule == null) {
            putMsg(result, Status.SCHEDULE_CRON_NOT_EXISTS, scheduleId);
            return result;
        }

        // determine whether the login user is the owner of the schedule
        if (loginUser.getId() != schedule.getUserId()
                && loginUser.getUserType() != UserType.ADMIN_USER) {
            putMsg(result, Status.USER_NO_OPERATION_PERM);
            return result;
        }

        // check whether the schedule is already online
        if (schedule.getReleaseState() == ReleaseState.ONLINE) {
            putMsg(result, Status.SCHEDULE_CRON_STATE_ONLINE, schedule.getId());
            return result;
        }

        int delete = scheduleMapper.deleteById(scheduleId);

        if (delete > 0) {
            putMsg(result, Status.SUCCESS);
        } else {
            putMsg(result, Status.DELETE_SCHEDULE_CRON_BY_ID_ERROR);
        }
        return result;
    }

    /**
     * preview schedule
     *
     * @param loginUser login user
     * @param projectName project name
     * @param schedule schedule expression
     * @return the next five fire times
     */
    public Map<String, Object> previewSchedule(User loginUser, String projectName, String schedule) {
        Map<String, Object> result = new HashMap<>();
        CronExpression cronExpression;
        ScheduleParam scheduleParam = JSONUtils.parseObject(schedule, ScheduleParam.class);
        Date now = new Date();

        Date startTime = now.after(scheduleParam.getStartTime()) ? now : scheduleParam.getStartTime();
        Date endTime = scheduleParam.getEndTime();
        try {
            cronExpression = CronUtils.parse2CronExpression(scheduleParam.getCrontab());
        } catch (ParseException e) {
            logger.error(e.getMessage(), e);
            putMsg(result, Status.PARSE_TO_CRON_EXPRESSION_ERROR);
            return result;
        }
        List<Date> selfFireDateList = CronUtils.getSelfFireDateList(startTime, endTime, cronExpression, Constants.PREVIEW_SCHEDULE_EXECUTE_COUNT);
        // collect the stream into a list; putting the raw Stream into the result map would not serialize
        result.put(Constants.DATA_LIST, selfFireDateList.stream().map(DateUtils::dateToString).collect(Collectors.toList()));
        putMsg(result, Status.SUCCESS);
        return result;
    }
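
    // Illustrative note (not part of this change): for a crontab of "0 0 5 * * ? *" previewed
    // on 2021-03-01, the returned DATA_LIST would hold the next PREVIEW_SCHEDULE_EXECUTE_COUNT
    // fire times rendered as strings, e.g. ["2021-03-01 05:00:00", "2021-03-02 05:00:00", ...]
    // (the dates shown here are hypothetical).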
}
@@ -0,0 +1,207 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.api.service.impl;

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.ProcessInstanceService;
import org.apache.dolphinscheduler.api.service.ProjectService;
import org.apache.dolphinscheduler.api.service.TaskInstanceService;
import org.apache.dolphinscheduler.api.service.UsersService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.ExecutionStatus;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.Project;
import org.apache.dolphinscheduler.dao.entity.TaskInstance;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ProjectMapper;
import org.apache.dolphinscheduler.dao.mapper.TaskInstanceMapper;
import org.apache.dolphinscheduler.service.process.ProcessService;

import java.text.MessageFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;

/**
 * task instance service impl
 */
@Service
public class TaskInstanceServiceImpl extends BaseServiceImpl implements TaskInstanceService {

    @Autowired
    ProjectMapper projectMapper;

    @Autowired
    ProjectService projectService;

    @Autowired
    ProcessService processService;

    @Autowired
    TaskInstanceMapper taskInstanceMapper;

    @Autowired
    ProcessInstanceService processInstanceService;

    @Autowired
    UsersService usersService;

    /**
     * query task instance list, paged and filtered by project, process instance, task name,
     * executor, start/end time, state and keyword
     *
     * @param loginUser login user
     * @param projectName project name
     * @param processInstanceId process instance id
     * @param processInstanceName process instance name
     * @param taskName task name
     * @param executorName executor name
     * @param startDate start time
     * @param endDate end time
     * @param searchVal search value
     * @param stateType state type
     * @param host host
     * @param pageNo page number
     * @param pageSize page size
     * @return task list page
     */
    public Map<String, Object> queryTaskListPaging(User loginUser, String projectName,
                                                   Integer processInstanceId, String processInstanceName, String taskName, String executorName, String startDate,
                                                   String endDate, String searchVal, ExecutionStatus stateType, String host,
                                                   Integer pageNo, Integer pageSize) {
        Map<String, Object> result = new HashMap<>();
        Project project = projectMapper.queryByName(projectName);

        Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
        Status status = (Status) checkResult.get(Constants.STATUS);
        if (status != Status.SUCCESS) {
            return checkResult;
        }

        int[] statusArray = null;
        if (stateType != null) {
            statusArray = new int[]{stateType.ordinal()};
        }

        Date start = null;
        Date end = null;
        if (StringUtils.isNotEmpty(startDate)) {
            start = DateUtils.getScheduleDate(startDate);
            if (start == null) {
                return generateInvalidParamRes(result, "startDate");
            }
        }
        if (StringUtils.isNotEmpty(endDate)) {
            end = DateUtils.getScheduleDate(endDate);
            if (end == null) {
                return generateInvalidParamRes(result, "endDate");
            }
        }

        Page<TaskInstance> page = new Page<>(pageNo, pageSize);
        PageInfo<Map<String, Object>> pageInfo = new PageInfo<>(pageNo, pageSize);
        int executorId = usersService.getUserIdByName(executorName);

        IPage<TaskInstance> taskInstanceIPage = taskInstanceMapper.queryTaskInstanceListPaging(
                page, project.getId(), processInstanceId, processInstanceName, searchVal, taskName, executorId, statusArray, host, start, end
        );
        Set<String> exclusionSet = new HashSet<>();
        exclusionSet.add(Constants.CLASS);
        exclusionSet.add("taskJson");
        List<TaskInstance> taskInstanceList = taskInstanceIPage.getRecords();

        for (TaskInstance taskInstance : taskInstanceList) {
            taskInstance.setDuration(DateUtils.format2Duration(taskInstance.getStartTime(), taskInstance.getEndTime()));
            User executor = usersService.queryUser(taskInstance.getExecutorId());
            if (null != executor) {
                taskInstance.setExecutorName(executor.getUserName());
            }
        }
        pageInfo.setTotalCount((int) taskInstanceIPage.getTotal());
        pageInfo.setLists(CollectionUtils.getListByExclusion(taskInstanceIPage.getRecords(), exclusionSet));
        result.put(Constants.DATA_LIST, pageInfo);
        putMsg(result, Status.SUCCESS);

        return result;
    }
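
    // Illustrative note (not part of this change): startDate/endDate are parsed with
    // DateUtils.getScheduleDate, which is assumed to expect the scheduler's usual
    // "yyyy-MM-dd HH:mm:ss" layout, e.g. (hypothetical call):
    //
    //   queryTaskListPaging(user, "demo-project", null, null, null, null,
    //           "2021-01-01 00:00:00", "2021-01-31 23:59:59", null, null, null, 1, 10);
    //
    // A value that fails to parse short-circuits into a REQUEST_PARAMS_NOT_VALID_ERROR response.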

    /**
     * change one task instance's state from failure to forced success
     *
     * @param loginUser login user
     * @param projectName project name
     * @param taskInstanceId task instance id
     * @return the result code and msg
     */
    public Map<String, Object> forceTaskSuccess(User loginUser, String projectName, Integer taskInstanceId) {
        Map<String, Object> result = new HashMap<>();
        Project project = projectMapper.queryByName(projectName);

        // check user auth
        Map<String, Object> checkResult = projectService.checkProjectAndAuth(loginUser, project, projectName);
        Status status = (Status) checkResult.get(Constants.STATUS);
        if (status != Status.SUCCESS) {
            return checkResult;
        }

        // check whether the task instance can be found
        TaskInstance task = taskInstanceMapper.selectById(taskInstanceId);
        if (task == null) {
            putMsg(result, Status.TASK_INSTANCE_NOT_FOUND);
            return result;
        }

        // check whether the task instance state type is failure
        if (!task.getState().typeIsFailure()) {
            putMsg(result, Status.TASK_INSTANCE_STATE_OPERATION_ERROR, taskInstanceId, task.getState().toString());
            return result;
        }

        // change the state of the task instance
        task.setState(ExecutionStatus.FORCED_SUCCESS);
        int changedNum = taskInstanceMapper.updateById(task);
        if (changedNum > 0) {
            putMsg(result, Status.SUCCESS);
        } else {
            putMsg(result, Status.FORCE_TASK_SUCCESS_ERROR);
        }

        return result;
    }
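
    // Illustrative note (not part of this change): only failure-type states can be forced to
    // success; ExecutionStatus.typeIsFailure() is assumed to cover states such as FAILURE and
    // NEED_FAULT_TOLERANCE. A task that is still RUNNING or already SUCCESS is rejected with
    // TASK_INSTANCE_STATE_OPERATION_ERROR rather than silently overwritten.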

    /**
     * generate a {@link org.apache.dolphinscheduler.api.enums.Status#REQUEST_PARAMS_NOT_VALID_ERROR} response with the parameter name
     *
     * @param result existing result map
     * @param params invalid parameter name
     * @return updated result map
     */
    private Map<String, Object> generateInvalidParamRes(Map<String, Object> result, String params) {
        result.put(Constants.STATUS, Status.REQUEST_PARAMS_NOT_VALID_ERROR);
        result.put(Constants.MSG, MessageFormat.format(Status.REQUEST_PARAMS_NOT_VALID_ERROR.getMsg(), params));
        return result;
    }
}
@@ -0,0 +1,85 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.api.service.impl;

import static org.apache.dolphinscheduler.common.Constants.TASK_RECORD_TABLE_HISTORY_HIVE_LOG;
import static org.apache.dolphinscheduler.common.Constants.TASK_RECORD_TABLE_HIVE_LOG;

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.TaskRecordService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.TaskRecordDao;
import org.apache.dolphinscheduler.dao.entity.TaskRecord;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.springframework.stereotype.Service;

/**
 * task record service impl
 */
@Service
public class TaskRecordServiceImpl extends BaseServiceImpl implements TaskRecordService {

    /**
     * query task record list paging
     *
     * @param isHistory whether to query the history table
     * @param taskName task name
     * @param startDate start time
     * @param taskDate task date
     * @param sourceTable source table
     * @param destTable destination table
     * @param endDate end time
     * @param state state
     * @param pageNo page number
     * @param pageSize page size
     * @return task record list
     */
    public Map<String, Object> queryTaskRecordListPaging(boolean isHistory, String taskName, String startDate,
                                                         String taskDate, String sourceTable,
                                                         String destTable, String endDate,
                                                         String state, Integer pageNo, Integer pageSize) {
        Map<String, Object> result = new HashMap<>();
        PageInfo<TaskRecord> pageInfo = new PageInfo<>(pageNo, pageSize);

        Map<String, String> map = new HashMap<>();
        map.put("taskName", taskName);
        map.put("taskDate", taskDate);
        map.put("state", state);
        map.put("sourceTable", sourceTable);
        map.put("targetTable", destTable);
        map.put("startTime", startDate);
        map.put("endTime", endDate);
        map.put("offset", pageInfo.getStart().toString());
        map.put("pageSize", pageInfo.getPageSize().toString());

        String table = isHistory ? TASK_RECORD_TABLE_HISTORY_HIVE_LOG : TASK_RECORD_TABLE_HIVE_LOG;
        int count = TaskRecordDao.countTaskRecord(map, table);
        List<TaskRecord> recordList = TaskRecordDao.queryAllTaskRecord(map, table);
        pageInfo.setTotalCount(count);
        pageInfo.setLists(recordList);
        result.put(Constants.DATA_LIST, pageInfo);
        putMsg(result, Status.SUCCESS);

        return result;
    }
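
    // Illustrative note (not part of this change): the filter map is handed to TaskRecordDao
    // as plain key/value pairs, so a typical lookup might set e.g. taskName "etl_users" and
    // startTime "2021-01-01 00:00:00" (hypothetical values) while leaving the other keys null;
    // isHistory merely switches between the live and history Hive log tables.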
}
@@ -0,0 +1,325 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.api.service.impl;

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.UdfFuncService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.api.utils.Result;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.enums.UdfType;
import org.apache.dolphinscheduler.common.utils.PropertyUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.Resource;
import org.apache.dolphinscheduler.dao.entity.UdfFunc;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.mapper.ResourceMapper;
import org.apache.dolphinscheduler.dao.mapper.UDFUserMapper;
import org.apache.dolphinscheduler.dao.mapper.UdfFuncMapper;

import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;

/**
 * udf func service impl
 */
@Service
public class UdfFuncServiceImpl extends BaseServiceImpl implements UdfFuncService {

    private static final Logger logger = LoggerFactory.getLogger(UdfFuncServiceImpl.class);

    @Autowired
    private ResourceMapper resourceMapper;

    @Autowired
    private UdfFuncMapper udfFuncMapper;

    @Autowired
    private UDFUserMapper udfUserMapper;

    /**
     * create udf function
     *
     * @param loginUser login user
     * @param funcName function name
     * @param className class name
     * @param argTypes argument types
     * @param database database
     * @param desc description
     * @param type udf type
     * @param resourceId resource id
     * @return create result code
     */
    public Result<Object> createUdfFunction(User loginUser,
                                            String funcName,
                                            String className,
                                            String argTypes,
                                            String database,
                                            String desc,
                                            UdfType type,
                                            int resourceId) {
        Result<Object> result = new Result<>();

        // check whether resource upload is enabled
        if (!PropertyUtils.getResUploadStartupState()) {
            logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState());
            putMsg(result, Status.HDFS_NOT_STARTUP);
            return result;
        }

        // verify whether the udf func name already exists
        if (checkUdfFuncNameExists(funcName)) {
            putMsg(result, Status.UDF_FUNCTION_EXISTS);
            return result;
        }

        Resource resource = resourceMapper.selectById(resourceId);
        if (resource == null) {
            logger.error("resourceId {} does not exist", resourceId);
            putMsg(result, Status.RESOURCE_NOT_EXIST);
            return result;
        }

        // save data
        UdfFunc udf = new UdfFunc();
        Date now = new Date();
        udf.setUserId(loginUser.getId());
        udf.setFuncName(funcName);
        udf.setClassName(className);
        if (StringUtils.isNotEmpty(argTypes)) {
            udf.setArgTypes(argTypes);
        }
        if (StringUtils.isNotEmpty(database)) {
            udf.setDatabase(database);
        }
        udf.setDescription(desc);
        udf.setResourceId(resourceId);
        udf.setResourceName(resource.getFullName());
        udf.setType(type);

        udf.setCreateTime(now);
        udf.setUpdateTime(now);

        udfFuncMapper.insert(udf);
        putMsg(result, Status.SUCCESS);
        return result;
    }
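
    // Illustrative note (not part of this change): a typical HIVE UDF registration might look
    // like the following hypothetical call, where resource id 7 points at a previously
    // uploaded jar containing the implementation class:
    //
    //   udfFuncService.createUdfFunction(loginUser, "str_upper", "com.example.udf.StrUpper",
    //           "string", "default", "upper-cases a string", UdfType.HIVE, 7);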

    /**
     * check whether a udf function with the given name already exists
     *
     * @param name name
     * @return true if the name already exists
     */
    private boolean checkUdfFuncNameExists(String name) {
        List<UdfFunc> resource = udfFuncMapper.queryUdfByIdStr(null, name);
        return resource != null && !resource.isEmpty();
    }

    /**
     * query udf function
     *
     * @param id udf function id
     * @return udf function detail
     */
    public Map<String, Object> queryUdfFuncDetail(int id) {
        Map<String, Object> result = new HashMap<>();
        UdfFunc udfFunc = udfFuncMapper.selectById(id);
        if (udfFunc == null) {
            putMsg(result, Status.RESOURCE_NOT_EXIST);
            return result;
        }
        result.put(Constants.DATA_LIST, udfFunc);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * update udf function
     *
     * @param udfFuncId udf function id
     * @param funcName function name
     * @param className class name
     * @param argTypes argument types
     * @param database database
     * @param desc description
     * @param type resource type
     * @param resourceId resource id
     * @return update result code
     */
    public Map<String, Object> updateUdfFunc(int udfFuncId,
                                             String funcName,
                                             String className,
                                             String argTypes,
                                             String database,
                                             String desc,
                                             UdfType type,
                                             int resourceId) {
        Map<String, Object> result = new HashMap<>();
        // verify the udf func exists
        UdfFunc udf = udfFuncMapper.selectUdfById(udfFuncId);

        if (udf == null) {
            result.put(Constants.STATUS, Status.UDF_FUNCTION_NOT_EXIST);
            result.put(Constants.MSG, Status.UDF_FUNCTION_NOT_EXIST.getMsg());
            return result;
        }

        // check whether resource upload is enabled
        if (!PropertyUtils.getResUploadStartupState()) {
            logger.error("resource upload startup state: {}", PropertyUtils.getResUploadStartupState());
            putMsg(result, Status.HDFS_NOT_STARTUP);
            return result;
        }

        // verify the new udf func name is not already taken
        if (!funcName.equals(udf.getFuncName())) {
            if (checkUdfFuncNameExists(funcName)) {
                logger.error("UdfFunc {} already exists, cannot create it again.", funcName);
                result.put(Constants.STATUS, Status.UDF_FUNCTION_EXISTS);
                result.put(Constants.MSG, Status.UDF_FUNCTION_EXISTS.getMsg());
                return result;
            }
        }

        Resource resource = resourceMapper.selectById(resourceId);
        if (resource == null) {
            logger.error("resourceId {} does not exist", resourceId);
            result.put(Constants.STATUS, Status.RESOURCE_NOT_EXIST);
            result.put(Constants.MSG, Status.RESOURCE_NOT_EXIST.getMsg());
            return result;
        }
        Date now = new Date();
        udf.setFuncName(funcName);
        udf.setClassName(className);
        udf.setArgTypes(argTypes);
        if (StringUtils.isNotEmpty(database)) {
            udf.setDatabase(database);
        }
        udf.setDescription(desc);
        udf.setResourceId(resourceId);
        udf.setResourceName(resource.getFullName());
        udf.setType(type);

        udf.setUpdateTime(now);

        udfFuncMapper.updateById(udf);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * query udf function list paging
     *
     * @param loginUser login user
     * @param searchVal search value
     * @param pageNo page number
     * @param pageSize page size
     * @return udf function list page
     */
    public Map<String, Object> queryUdfFuncListPaging(User loginUser, String searchVal, Integer pageNo, Integer pageSize) {
        Map<String, Object> result = new HashMap<>();
        PageInfo<UdfFunc> pageInfo = new PageInfo<>(pageNo, pageSize);
        IPage<UdfFunc> udfFuncList = getUdfFuncsPage(loginUser, searchVal, pageSize, pageNo);
        pageInfo.setTotalCount((int) udfFuncList.getTotal());
        pageInfo.setLists(udfFuncList.getRecords());
        result.put(Constants.DATA_LIST, pageInfo);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * get udf functions
     *
     * @param loginUser login user
     * @param searchVal search value
     * @param pageSize page size
     * @param pageNo page number
     * @return udf function list page
     */
    private IPage<UdfFunc> getUdfFuncsPage(User loginUser, String searchVal, Integer pageSize, int pageNo) {
        int userId = loginUser.getId();
        // admins see all udf functions; userId 0 lifts the ownership filter
        if (isAdmin(loginUser)) {
            userId = 0;
        }
        Page<UdfFunc> page = new Page<>(pageNo, pageSize);
        return udfFuncMapper.queryUdfFuncPaging(page, userId, searchVal);
    }

    /**
     * query udf list
     *
     * @param loginUser login user
     * @param type udf type
     * @return udf func list
     */
    public Map<String, Object> queryUdfFuncList(User loginUser, Integer type) {
        Map<String, Object> result = new HashMap<>();
        int userId = loginUser.getId();
        if (isAdmin(loginUser)) {
            userId = 0;
        }
        List<UdfFunc> udfFuncList = udfFuncMapper.getUdfFuncByType(userId, type);

        result.put(Constants.DATA_LIST, udfFuncList);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * delete udf function
     *
     * @param id udf function id
     * @return delete result code
     */
    @Transactional(rollbackFor = RuntimeException.class)
    public Result<Object> delete(int id) {
        Result<Object> result = new Result<>();
        udfFuncMapper.deleteById(id);
        udfUserMapper.deleteByUdfFuncId(id);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * verify udf function by name
     *
     * @param name name
     * @return SUCCESS if the name can be used, otherwise UDF_FUNCTION_EXISTS
     */
    public Result<Object> verifyUdfFuncByName(String name) {
        Result<Object> result = new Result<>();
        if (checkUdfFuncNameExists(name)) {
            putMsg(result, Status.UDF_FUNCTION_EXISTS);
        } else {
            putMsg(result, Status.SUCCESS);
        }
        return result;
    }

}
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,108 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.api.service.impl;

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.WorkFlowLineageService;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.dao.entity.WorkFlowLineage;
import org.apache.dolphinscheduler.dao.entity.WorkFlowRelation;
import org.apache.dolphinscheduler.dao.mapper.WorkFlowLineageMapper;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

/**
 * work flow lineage service impl
 */
@Service
public class WorkFlowLineageServiceImpl extends BaseServiceImpl implements WorkFlowLineageService {

    @Autowired
    private WorkFlowLineageMapper workFlowLineageMapper;

    public Map<String, Object> queryWorkFlowLineageByName(String workFlowName, int projectId) {
        Map<String, Object> result = new HashMap<>();
        List<WorkFlowLineage> workFlowLineageList = workFlowLineageMapper.queryByName(workFlowName, projectId);
        result.put(Constants.DATA_LIST, workFlowLineageList);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    private void getWorkFlowRelationRecursion(Set<Integer> ids, List<WorkFlowRelation> workFlowRelations, Set<Integer> sourceIds) {
        // mark the current level as visited before expanding it
        sourceIds.addAll(ids);
        for (int id : ids) {
            List<WorkFlowRelation> workFlowRelationsTmp = workFlowLineageMapper.querySourceTarget(id);
            if (workFlowRelationsTmp != null && !workFlowRelationsTmp.isEmpty()) {
                Set<Integer> idsTmp = new HashSet<>();
                for (WorkFlowRelation workFlowRelation : workFlowRelationsTmp) {
                    // only follow targets that have not been visited yet, to avoid cycles
                    if (!sourceIds.contains(workFlowRelation.getTargetWorkFlowId())) {
                        idsTmp.add(workFlowRelation.getTargetWorkFlowId());
                    }
                }
                workFlowRelations.addAll(workFlowRelationsTmp);
                getWorkFlowRelationRecursion(idsTmp, workFlowRelations, sourceIds);
            }
        }
    }
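
    // Illustrative note (not part of this change): this is a depth-first walk over the
    // source -> target workflow relation edges with a visited set (sourceIds) to stop on
    // cycles. Given hypothetical edges 1 -> 2, 2 -> 3 and 3 -> 1, starting from {1} it would
    // collect all three relations and then terminate, because workflow 1 is already in
    // sourceIds when 3 is expanded.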

    public Map<String, Object> queryWorkFlowLineageByIds(Set<Integer> ids, int projectId) {
        Map<String, Object> result = new HashMap<>();
        List<WorkFlowLineage> workFlowLineageList = workFlowLineageMapper.queryByIds(ids, projectId);
        Map<String, Object> workFlowLists = new HashMap<>();
        Set<Integer> idsV = new HashSet<>();
        if (ids == null || ids.isEmpty()) {
            for (WorkFlowLineage workFlowLineage : workFlowLineageList) {
                idsV.add(workFlowLineage.getWorkFlowId());
            }
        } else {
            idsV = ids;
        }
        List<WorkFlowRelation> workFlowRelations = new ArrayList<>();
        Set<Integer> sourceIds = new HashSet<>();
        getWorkFlowRelationRecursion(idsV, workFlowRelations, sourceIds);

        Set<Integer> idSet = new HashSet<>();
        // if the incoming ids are not empty, the downstream workflows' detail attributes need to be added as well
        if (ids != null && !ids.isEmpty()) {
            for (WorkFlowRelation workFlowRelation : workFlowRelations) {
                idSet.add(workFlowRelation.getTargetWorkFlowId());
            }
            for (int id : ids) {
                idSet.remove(id);
            }
            if (!idSet.isEmpty()) {
                workFlowLineageList.addAll(workFlowLineageMapper.queryByIds(idSet, projectId));
            }
        }

        workFlowLists.put(Constants.WORKFLOW_LIST, workFlowLineageList);
        workFlowLists.put(Constants.WORKFLOW_RELATION_LIST, workFlowRelations);
        result.put(Constants.DATA_LIST, workFlowLists);
        putMsg(result, Status.SUCCESS);
        return result;
    }

}
@@ -0,0 +1,179 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.dolphinscheduler.api.service.impl;

import static org.apache.dolphinscheduler.common.Constants.DEFAULT_WORKER_GROUP;

import org.apache.dolphinscheduler.api.enums.Status;
import org.apache.dolphinscheduler.api.service.WorkerGroupService;
import org.apache.dolphinscheduler.api.utils.PageInfo;
import org.apache.dolphinscheduler.common.Constants;
import org.apache.dolphinscheduler.common.utils.CollectionUtils;
import org.apache.dolphinscheduler.common.utils.DateUtils;
import org.apache.dolphinscheduler.common.utils.StringUtils;
import org.apache.dolphinscheduler.dao.entity.User;
import org.apache.dolphinscheduler.dao.entity.WorkerGroup;
import org.apache.dolphinscheduler.dao.mapper.ProcessInstanceMapper;
import org.apache.dolphinscheduler.service.zk.ZookeeperCachedOperator;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

/**
 * worker group service impl
 */
@Service
public class WorkerGroupServiceImpl extends BaseServiceImpl implements WorkerGroupService {

    private static final String NO_NODE_EXCEPTION_REGEX = "KeeperException$NoNodeException";

    @Autowired
    protected ZookeeperCachedOperator zookeeperCachedOperator;

    @Autowired
    ProcessInstanceMapper processInstanceMapper;

    /**
     * query worker group paging
     *
     * @param loginUser login user
     * @param pageNo page number
     * @param pageSize page size
     * @param searchVal search value
     * @return worker group list page
     */
    public Map<String, Object> queryAllGroupPaging(User loginUser, Integer pageNo, Integer pageSize, String searchVal) {
        // list from index
        int fromIndex = (pageNo - 1) * pageSize;
        // list to index
        int toIndex = (pageNo - 1) * pageSize + pageSize;

        Map<String, Object> result = new HashMap<>();
        if (isNotAdmin(loginUser, result)) {
            return result;
        }

        List<WorkerGroup> workerGroups = getWorkerGroups(true);

        List<WorkerGroup> resultDataList = new ArrayList<>();

        if (CollectionUtils.isNotEmpty(workerGroups)) {
            List<WorkerGroup> searchValDataList = new ArrayList<>();

            if (StringUtils.isNotEmpty(searchVal)) {
                for (WorkerGroup workerGroup : workerGroups) {
                    if (workerGroup.getName().contains(searchVal)) {
                        searchValDataList.add(workerGroup);
                    }
                }
            } else {
                searchValDataList = workerGroups;
            }

            // clamp the window to the list size so subList never goes out of bounds
            toIndex = Math.min(toIndex, searchValDataList.size());
            fromIndex = Math.min(fromIndex, toIndex);
            resultDataList = searchValDataList.subList(fromIndex, toIndex);
        }

        PageInfo<WorkerGroup> pageInfo = new PageInfo<>(pageNo, pageSize);
        pageInfo.setTotalCount(resultDataList.size());
        pageInfo.setLists(resultDataList);

        result.put(Constants.DATA_LIST, pageInfo);
        putMsg(result, Status.SUCCESS);
        return result;
    }
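
    // Illustrative note (not part of this change): worker groups live in ZooKeeper rather than
    // the database, so paging happens in memory. With pageSize 10, page 2 covers list indexes
    // 10..19, and the Math.min clamp above simply yields an empty page when fewer than 11
    // groups match the search value.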

    /**
     * query all worker groups
     *
     * @return all worker group list
     */
    public Map<String, Object> queryAllGroup() {
        Map<String, Object> result = new HashMap<>();

        List<WorkerGroup> workerGroups = getWorkerGroups(false);

        Set<String> availableWorkerGroupSet = workerGroups.stream()
                .map(WorkerGroup::getName)
                .collect(Collectors.toSet());
        result.put(Constants.DATA_LIST, availableWorkerGroupSet);
        putMsg(result, Status.SUCCESS);
        return result;
    }

    /**
     * get worker groups
     *
     * @param isPaging whether paging
     * @return WorkerGroup list
     */
    private List<WorkerGroup> getWorkerGroups(boolean isPaging) {
        String workerPath = zookeeperCachedOperator.getZookeeperConfig().getDsRoot() + Constants.ZOOKEEPER_DOLPHINSCHEDULER_WORKERS;
        List<WorkerGroup> workerGroups = new ArrayList<>();
        List<String> workerGroupList;
        try {
            workerGroupList = zookeeperCachedOperator.getChildrenKeys(workerPath);
        } catch (Exception e) {
            if (e.getMessage().contains(NO_NODE_EXCEPTION_REGEX)) {
                if (!isPaging) {
                    // ignore NoNodeException and return the default worker group
                    WorkerGroup wg = new WorkerGroup();
                    wg.setName(DEFAULT_WORKER_GROUP);
                    workerGroups.add(wg);
                }
                return workerGroups;
            } else {
                throw e;
            }
        }

        for (String workerGroup : workerGroupList) {
            String workerGroupPath = String.format("%s/%s", workerPath, workerGroup);
            List<String> childrenNodes = zookeeperCachedOperator.getChildrenKeys(workerGroupPath);
            String timeStamp = "";
            for (int i = 0; i < childrenNodes.size(); i++) {
                String ip = childrenNodes.get(i);
                childrenNodes.set(i, ip.substring(0, ip.lastIndexOf(":")));
                timeStamp = ip.substring(ip.lastIndexOf(":"));
            }
            if (CollectionUtils.isNotEmpty(childrenNodes)) {
                WorkerGroup wg = new WorkerGroup();
                wg.setName(workerGroup);
                if (isPaging) {
                    wg.setIpList(childrenNodes);
                    String registeredIpValue = zookeeperCachedOperator.get(workerGroupPath + "/" + childrenNodes.get(0) + timeStamp);
                    wg.setCreateTime(DateUtils.stringToDate(registeredIpValue.split(",")[6]));
                    wg.setUpdateTime(DateUtils.stringToDate(registeredIpValue.split(",")[7]));
                }
                workerGroups.add(wg);
            }
        }
        return workerGroups;
    }
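
    // Illustrative note (not part of this change): this assumes a ZooKeeper layout along the
    // lines of <dsRoot>/nodes/worker/<groupName>/<host:port:timestamp>, where the node's value
    // is a comma-separated heartbeat string whose 7th and 8th fields (indexes 6 and 7) hold the
    // registration and last-update times; the exact layout is defined by the worker registry,
    // not by this class.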

}
Some files were not shown because too many files have changed in this diff